import numpy as np
import pandas as pd
import os, cv2, zipfile, shutil, h5py
import matplotlib.pyplot as plt
%config InlineBackend.figure_format = 'retina'
from tqdm import tqdm
from os.path import isfile, isdir
from sklearn.utils import shuffle
# Set random seed for Keras
np.random.seed(42)
from keras.models import *
from keras.layers import *
from keras.callbacks import *
from keras.applications import *
from keras.preprocessing.image import ImageDataGenerator
from keras.applications.imagenet_utils import preprocess_input
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot
Using TensorFlow backend.
Before unpacking, download train.zip (training set) and test.zip (test set) from Kaggle into the current directory.
def unzip_file(f_path):
    """Extract every member of the zip archive at *f_path* into the current directory.

    Args:
        f_path: Path to a ``.zip`` archive.

    Raises:
        zipfile.BadZipFile: If *f_path* is not a valid zip archive.
    """
    # Context manager guarantees the archive handle is closed even if
    # extraction fails part-way (the original leaked the open file).
    with zipfile.ZipFile(f_path) as fzip:
        fzip.extractall()
# Archive files and the directories they unpack into.
train = './train.zip'
train_folder = './train'
test = './test.zip'
test_folder = './test'

# Unpack each archive only when its target directory is not already present,
# so re-running the cell is a no-op.
for archive, folder in ((train, train_folder), (test, test_folder)):
    if not isdir(folder):
        unzip_file(archive)
Record the resolution and channel count of every training image.
# Collect ((height, width, channels), filename) for every training image.
train_info = []
for img in tqdm(os.listdir(train_folder)):
    info = cv2.imread(os.path.join(train_folder, img))
    if info is None:
        # cv2.imread returns None for unreadable or non-image files
        # (e.g. hidden files); skip them instead of crashing on .shape.
        continue
    train_info.append((info.shape, img))
100%|██████████| 25000/25000 [00:56<00:00, 442.96it/s]
train_info[0]
((335, 499, 3), 'dog.523.jpg')
# Histogram of per-image pixel counts (width * height) over the training set.
plt.figure(figsize=(10, 4))
pixel_counts = [shape[0] * shape[1] for shape, _ in train_info]
plt.title("Image Size(W*H) Distribution")
plt.xlabel('Image Size(W*H)')
plt.ylabel('Amount')
plt.hist(pixel_counts, bins=20, alpha=0.5, range=(0, 300000),
         color='red', label='image_size')
plt.show()
# Preview the first twelve training images in a 3x4 grid, each titled with
# its class label (the filename prefix, e.g. 'dog' from 'dog.523.jpg').
plt.figure(figsize=(10, 8))
preview_files = os.listdir(train_folder)[:12]
for idx, fname in enumerate(preview_files, start=1):
    image = plt.imread(os.path.join(train_folder, fname))
    plt.subplot(3, 4, idx)
    plt.title(fname.split('.')[0])
    plt.axis('on')
    plt.imshow(image)
plt.show()
# Directory layout consumed by Keras ImageDataGenerator:
# one subdirectory per class for train/val, a single mixed dir for test.
TRAIN_GEN = './train_gen'
TRAIN_GEN_DOG = './train_gen/dog'
TRAIN_GEN_CAT = './train_gen/cat'
VAL_GEN = './val_gen'
VAL_GEN_DOG = './val_gen/dog'
VAL_GEN_CAT = './val_gen/cat'
TEST_GEN = './test_gen'
TEST_GEN_MIXED = './test_gen/mixed'

# Partition the training filenames by class, then hold out the final 20%
# of each class as the validation set.
all_files = os.listdir(train_folder)
train_cat = [f for f in all_files if 'cat' in f]
train_dog = [f for f in all_files if 'dog' in f]
val_num = int(len(train_dog) * .2)
train_dog, val_dog = train_dog[:-val_num], train_dog[-val_num:]
train_cat, val_cat = train_cat[:-val_num], train_cat[-val_num:]
print('There are {} train dogs and {} validation dogs'.format(len(train_dog), len(val_dog)))
There are 10000 train dogs and 2500 validation dogs
def _link_images(names, rel_src_dir, dest_dir):
    # One relative symlink per file name inside dest_dir — no image data copied.
    for fname in names:
        os.symlink(rel_src_dir + fname, dest_dir + r'/' + fname)

# Build the train/val/test symlink trees; each is created only once.
if not isdir(TRAIN_GEN):
    for d in (TRAIN_GEN, TRAIN_GEN_DOG, TRAIN_GEN_CAT):
        os.mkdir(d)
    _link_images(train_dog, '../../train/', TRAIN_GEN_DOG)
    _link_images(train_cat, '../../train/', TRAIN_GEN_CAT)

if not isdir(VAL_GEN):
    for d in (VAL_GEN, VAL_GEN_DOG, VAL_GEN_CAT):
        os.mkdir(d)
    _link_images(val_dog, '../../train/', VAL_GEN_DOG)
    _link_images(val_cat, '../../train/', VAL_GEN_CAT)

if not isdir(TEST_GEN):
    os.mkdir(TEST_GEN)
    os.mkdir(TEST_GEN_MIXED)
    _link_images(os.listdir(test_folder), '../../test/', TEST_GEN_MIXED)
Create the training-set directory used for feature extraction.
# Directory layout for feature extraction: here the FULL training set is
# used (no validation hold-out), so the class lists are rebuilt unsliced.
TRAIN_GEN_EX = './train_gen_ex'
TRAIN_GEN_EX_DOG = './train_gen_ex/dog'
TRAIN_GEN_EX_CAT = './train_gen_ex/cat'
TEST_GEN = './test_gen'
TEST_GEN_MIXED = './test_gen/mixed'

listing = os.listdir(train_folder)
train_cat = [f for f in listing if 'cat' in f]
train_dog = [f for f in listing if 'dog' in f]
print('There are {} train dogs'.format(len(train_dog)))
There are 12500 train dogs
# Populate the feature-extraction tree with relative symlinks (create once).
if not isdir(TRAIN_GEN_EX):
    for d in (TRAIN_GEN_EX, TRAIN_GEN_EX_DOG, TRAIN_GEN_EX_CAT):
        os.mkdir(d)
    for fname in train_dog:
        os.symlink('../../train/' + fname, TRAIN_GEN_EX_DOG + r'/' + fname)
    for fname in train_cat:
        os.symlink('../../train/' + fname, TRAIN_GEN_EX_CAT + r'/' + fname)
The class's methods cover creating the image data generators, building and configuring the pretrained model, training, and prediction.
from transfer_learning import *
# Wrap a pretrained ResNet50 in the project's TransferLearning_CNN helper,
# wiring it to the directory-based generators built above.
# NOTE(review): TransferLearning_CNN is defined in the external
# transfer_learning module; parameter semantics below are inferred from the
# names — confirm against that module.
tl_resnet50 = TransferLearning_CNN(
    train_dir=TRAIN_GEN,        # two-class training images (dog/, cat/)
    val_dir=VAL_GEN,            # held-out 20% per class
    test_dir=TEST_GEN,          # single mixed/ dir of unlabeled test images
    model_name='resnet50',
    fine_tune_layer=20,         # presumably layers from index 20 on are trainable — TODO confirm
    epochs=8,
    patience=5,                 # presumably early-stopping patience — TODO confirm
    batch_size=128,
    img_sz=(224, 224))          # ResNet50's expected input resolution
Found 1600 images belonging to 2 classes. Found 400 images belonging to 2 classes. Found 1000 images belonging to 1 classes.
tl_resnet50.model_summary()
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) (None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv1 (Conv2D) (None, 112, 112, 64) 9472 input_1[0][0]
__________________________________________________________________________________________________
bn_conv1 (BatchNormalization) (None, 112, 112, 64) 256 conv1[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 112, 112, 64) 0 bn_conv1[0][0]
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D) (None, 55, 55, 64) 0 activation_1[0][0]
__________________________________________________________________________________________________
res2a_branch2a (Conv2D) (None, 55, 55, 64) 4160 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
bn2a_branch2a (BatchNormalizati (None, 55, 55, 64) 256 res2a_branch2a[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 55, 55, 64) 0 bn2a_branch2a[0][0]
__________________________________________________________________________________________________
res2a_branch2b (Conv2D) (None, 55, 55, 64) 36928 activation_2[0][0]
__________________________________________________________________________________________________
bn2a_branch2b (BatchNormalizati (None, 55, 55, 64) 256 res2a_branch2b[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 55, 55, 64) 0 bn2a_branch2b[0][0]
__________________________________________________________________________________________________
res2a_branch2c (Conv2D) (None, 55, 55, 256) 16640 activation_3[0][0]
__________________________________________________________________________________________________
res2a_branch1 (Conv2D) (None, 55, 55, 256) 16640 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
bn2a_branch2c (BatchNormalizati (None, 55, 55, 256) 1024 res2a_branch2c[0][0]
__________________________________________________________________________________________________
bn2a_branch1 (BatchNormalizatio (None, 55, 55, 256) 1024 res2a_branch1[0][0]
__________________________________________________________________________________________________
add_1 (Add) (None, 55, 55, 256) 0 bn2a_branch2c[0][0]
bn2a_branch1[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 55, 55, 256) 0 add_1[0][0]
__________________________________________________________________________________________________
res2b_branch2a (Conv2D) (None, 55, 55, 64) 16448 activation_4[0][0]
__________________________________________________________________________________________________
bn2b_branch2a (BatchNormalizati (None, 55, 55, 64) 256 res2b_branch2a[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 55, 55, 64) 0 bn2b_branch2a[0][0]
__________________________________________________________________________________________________
res2b_branch2b (Conv2D) (None, 55, 55, 64) 36928 activation_5[0][0]
__________________________________________________________________________________________________
bn2b_branch2b (BatchNormalizati (None, 55, 55, 64) 256 res2b_branch2b[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 55, 55, 64) 0 bn2b_branch2b[0][0]
__________________________________________________________________________________________________
res2b_branch2c (Conv2D) (None, 55, 55, 256) 16640 activation_6[0][0]
__________________________________________________________________________________________________
bn2b_branch2c (BatchNormalizati (None, 55, 55, 256) 1024 res2b_branch2c[0][0]
__________________________________________________________________________________________________
add_2 (Add) (None, 55, 55, 256) 0 bn2b_branch2c[0][0]
activation_4[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 55, 55, 256) 0 add_2[0][0]
__________________________________________________________________________________________________
res2c_branch2a (Conv2D) (None, 55, 55, 64) 16448 activation_7[0][0]
__________________________________________________________________________________________________
bn2c_branch2a (BatchNormalizati (None, 55, 55, 64) 256 res2c_branch2a[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 55, 55, 64) 0 bn2c_branch2a[0][0]
__________________________________________________________________________________________________
res2c_branch2b (Conv2D) (None, 55, 55, 64) 36928 activation_8[0][0]
__________________________________________________________________________________________________
bn2c_branch2b (BatchNormalizati (None, 55, 55, 64) 256 res2c_branch2b[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 55, 55, 64) 0 bn2c_branch2b[0][0]
__________________________________________________________________________________________________
res2c_branch2c (Conv2D) (None, 55, 55, 256) 16640 activation_9[0][0]
__________________________________________________________________________________________________
bn2c_branch2c (BatchNormalizati (None, 55, 55, 256) 1024 res2c_branch2c[0][0]
__________________________________________________________________________________________________
add_3 (Add) (None, 55, 55, 256) 0 bn2c_branch2c[0][0]
activation_7[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 55, 55, 256) 0 add_3[0][0]
__________________________________________________________________________________________________
res3a_branch2a (Conv2D) (None, 28, 28, 128) 32896 activation_10[0][0]
__________________________________________________________________________________________________
bn3a_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3a_branch2a[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 28, 28, 128) 0 bn3a_branch2a[0][0]
__________________________________________________________________________________________________
res3a_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_11[0][0]
__________________________________________________________________________________________________
bn3a_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3a_branch2b[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 28, 28, 128) 0 bn3a_branch2b[0][0]
__________________________________________________________________________________________________
res3a_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_12[0][0]
__________________________________________________________________________________________________
res3a_branch1 (Conv2D) (None, 28, 28, 512) 131584 activation_10[0][0]
__________________________________________________________________________________________________
bn3a_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3a_branch2c[0][0]
__________________________________________________________________________________________________
bn3a_branch1 (BatchNormalizatio (None, 28, 28, 512) 2048 res3a_branch1[0][0]
__________________________________________________________________________________________________
add_4 (Add) (None, 28, 28, 512) 0 bn3a_branch2c[0][0]
bn3a_branch1[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 28, 28, 512) 0 add_4[0][0]
__________________________________________________________________________________________________
res3b_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_13[0][0]
__________________________________________________________________________________________________
bn3b_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3b_branch2a[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 28, 28, 128) 0 bn3b_branch2a[0][0]
__________________________________________________________________________________________________
res3b_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_14[0][0]
__________________________________________________________________________________________________
bn3b_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3b_branch2b[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 28, 28, 128) 0 bn3b_branch2b[0][0]
__________________________________________________________________________________________________
res3b_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_15[0][0]
__________________________________________________________________________________________________
bn3b_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3b_branch2c[0][0]
__________________________________________________________________________________________________
add_5 (Add) (None, 28, 28, 512) 0 bn3b_branch2c[0][0]
activation_13[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 28, 28, 512) 0 add_5[0][0]
__________________________________________________________________________________________________
res3c_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_16[0][0]
__________________________________________________________________________________________________
bn3c_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3c_branch2a[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 28, 28, 128) 0 bn3c_branch2a[0][0]
__________________________________________________________________________________________________
res3c_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_17[0][0]
__________________________________________________________________________________________________
bn3c_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3c_branch2b[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 28, 28, 128) 0 bn3c_branch2b[0][0]
__________________________________________________________________________________________________
res3c_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_18[0][0]
__________________________________________________________________________________________________
bn3c_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3c_branch2c[0][0]
__________________________________________________________________________________________________
add_6 (Add) (None, 28, 28, 512) 0 bn3c_branch2c[0][0]
activation_16[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 28, 28, 512) 0 add_6[0][0]
__________________________________________________________________________________________________
res3d_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_19[0][0]
__________________________________________________________________________________________________
bn3d_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3d_branch2a[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 28, 28, 128) 0 bn3d_branch2a[0][0]
__________________________________________________________________________________________________
res3d_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_20[0][0]
__________________________________________________________________________________________________
bn3d_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3d_branch2b[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 28, 28, 128) 0 bn3d_branch2b[0][0]
__________________________________________________________________________________________________
res3d_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_21[0][0]
__________________________________________________________________________________________________
bn3d_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3d_branch2c[0][0]
__________________________________________________________________________________________________
add_7 (Add) (None, 28, 28, 512) 0 bn3d_branch2c[0][0]
activation_19[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 28, 28, 512) 0 add_7[0][0]
__________________________________________________________________________________________________
res4a_branch2a (Conv2D) (None, 14, 14, 256) 131328 activation_22[0][0]
__________________________________________________________________________________________________
bn4a_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4a_branch2a[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 14, 14, 256) 0 bn4a_branch2a[0][0]
__________________________________________________________________________________________________
res4a_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_23[0][0]
__________________________________________________________________________________________________
bn4a_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4a_branch2b[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 14, 14, 256) 0 bn4a_branch2b[0][0]
__________________________________________________________________________________________________
res4a_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_24[0][0]
__________________________________________________________________________________________________
res4a_branch1 (Conv2D) (None, 14, 14, 1024) 525312 activation_22[0][0]
__________________________________________________________________________________________________
bn4a_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4a_branch2c[0][0]
__________________________________________________________________________________________________
bn4a_branch1 (BatchNormalizatio (None, 14, 14, 1024) 4096 res4a_branch1[0][0]
__________________________________________________________________________________________________
add_8 (Add) (None, 14, 14, 1024) 0 bn4a_branch2c[0][0]
bn4a_branch1[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 14, 14, 1024) 0 add_8[0][0]
__________________________________________________________________________________________________
res4b_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_25[0][0]
__________________________________________________________________________________________________
bn4b_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4b_branch2a[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 14, 14, 256) 0 bn4b_branch2a[0][0]
__________________________________________________________________________________________________
res4b_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_26[0][0]
__________________________________________________________________________________________________
bn4b_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4b_branch2b[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 14, 14, 256) 0 bn4b_branch2b[0][0]
__________________________________________________________________________________________________
res4b_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_27[0][0]
__________________________________________________________________________________________________
bn4b_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4b_branch2c[0][0]
__________________________________________________________________________________________________
add_9 (Add) (None, 14, 14, 1024) 0 bn4b_branch2c[0][0]
activation_25[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 14, 14, 1024) 0 add_9[0][0]
__________________________________________________________________________________________________
res4c_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_28[0][0]
__________________________________________________________________________________________________
bn4c_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4c_branch2a[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 14, 14, 256) 0 bn4c_branch2a[0][0]
__________________________________________________________________________________________________
res4c_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_29[0][0]
__________________________________________________________________________________________________
bn4c_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4c_branch2b[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 14, 14, 256) 0 bn4c_branch2b[0][0]
__________________________________________________________________________________________________
res4c_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_30[0][0]
__________________________________________________________________________________________________
bn4c_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4c_branch2c[0][0]
__________________________________________________________________________________________________
add_10 (Add) (None, 14, 14, 1024) 0 bn4c_branch2c[0][0]
activation_28[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 14, 14, 1024) 0 add_10[0][0]
__________________________________________________________________________________________________
res4d_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_31[0][0]
__________________________________________________________________________________________________
bn4d_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4d_branch2a[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 14, 14, 256) 0 bn4d_branch2a[0][0]
__________________________________________________________________________________________________
res4d_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_32[0][0]
__________________________________________________________________________________________________
bn4d_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4d_branch2b[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 14, 14, 256) 0 bn4d_branch2b[0][0]
__________________________________________________________________________________________________
res4d_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_33[0][0]
__________________________________________________________________________________________________
bn4d_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4d_branch2c[0][0]
__________________________________________________________________________________________________
add_11 (Add) (None, 14, 14, 1024) 0 bn4d_branch2c[0][0]
activation_31[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 14, 14, 1024) 0 add_11[0][0]
__________________________________________________________________________________________________
res4e_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_34[0][0]
__________________________________________________________________________________________________
bn4e_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4e_branch2a[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 14, 14, 256) 0 bn4e_branch2a[0][0]
__________________________________________________________________________________________________
res4e_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_35[0][0]
__________________________________________________________________________________________________
bn4e_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4e_branch2b[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 14, 14, 256) 0 bn4e_branch2b[0][0]
__________________________________________________________________________________________________
res4e_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_36[0][0]
__________________________________________________________________________________________________
bn4e_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4e_branch2c[0][0]
__________________________________________________________________________________________________
add_12 (Add) (None, 14, 14, 1024) 0 bn4e_branch2c[0][0]
activation_34[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 14, 14, 1024) 0 add_12[0][0]
__________________________________________________________________________________________________
res4f_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_37[0][0]
__________________________________________________________________________________________________
bn4f_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4f_branch2a[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 14, 14, 256) 0 bn4f_branch2a[0][0]
__________________________________________________________________________________________________
res4f_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_38[0][0]
__________________________________________________________________________________________________
bn4f_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4f_branch2b[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 14, 14, 256) 0 bn4f_branch2b[0][0]
__________________________________________________________________________________________________
res4f_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_39[0][0]
__________________________________________________________________________________________________
bn4f_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4f_branch2c[0][0]
__________________________________________________________________________________________________
add_13 (Add) (None, 14, 14, 1024) 0 bn4f_branch2c[0][0]
activation_37[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 14, 14, 1024) 0 add_13[0][0]
__________________________________________________________________________________________________
res5a_branch2a (Conv2D) (None, 7, 7, 512) 524800 activation_40[0][0]
__________________________________________________________________________________________________
bn5a_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5a_branch2a[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 7, 7, 512) 0 bn5a_branch2a[0][0]
__________________________________________________________________________________________________
res5a_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_41[0][0]
__________________________________________________________________________________________________
bn5a_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5a_branch2b[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 7, 7, 512) 0 bn5a_branch2b[0][0]
__________________________________________________________________________________________________
res5a_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_42[0][0]
__________________________________________________________________________________________________
res5a_branch1 (Conv2D) (None, 7, 7, 2048) 2099200 activation_40[0][0]
__________________________________________________________________________________________________
bn5a_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5a_branch2c[0][0]
__________________________________________________________________________________________________
bn5a_branch1 (BatchNormalizatio (None, 7, 7, 2048) 8192 res5a_branch1[0][0]
__________________________________________________________________________________________________
add_14 (Add) (None, 7, 7, 2048) 0 bn5a_branch2c[0][0]
bn5a_branch1[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 7, 7, 2048) 0 add_14[0][0]
__________________________________________________________________________________________________
res5b_branch2a (Conv2D) (None, 7, 7, 512) 1049088 activation_43[0][0]
__________________________________________________________________________________________________
bn5b_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5b_branch2a[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 7, 7, 512) 0 bn5b_branch2a[0][0]
__________________________________________________________________________________________________
res5b_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_44[0][0]
__________________________________________________________________________________________________
bn5b_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5b_branch2b[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 7, 7, 512) 0 bn5b_branch2b[0][0]
__________________________________________________________________________________________________
res5b_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_45[0][0]
__________________________________________________________________________________________________
bn5b_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5b_branch2c[0][0]
__________________________________________________________________________________________________
add_15 (Add) (None, 7, 7, 2048) 0 bn5b_branch2c[0][0]
activation_43[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 7, 7, 2048) 0 add_15[0][0]
__________________________________________________________________________________________________
res5c_branch2a (Conv2D) (None, 7, 7, 512) 1049088 activation_46[0][0]
__________________________________________________________________________________________________
bn5c_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5c_branch2a[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 7, 7, 512) 0 bn5c_branch2a[0][0]
__________________________________________________________________________________________________
res5c_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_47[0][0]
__________________________________________________________________________________________________
bn5c_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5c_branch2b[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 7, 7, 512) 0 bn5c_branch2b[0][0]
__________________________________________________________________________________________________
res5c_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_48[0][0]
__________________________________________________________________________________________________
bn5c_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5c_branch2c[0][0]
__________________________________________________________________________________________________
add_16 (Add) (None, 7, 7, 2048) 0 bn5c_branch2c[0][0]
activation_46[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 7, 7, 2048) 0 add_16[0][0]
__________________________________________________________________________________________________
avg_pool (AveragePooling2D) (None, 1, 1, 2048) 0 activation_49[0][0]
__________________________________________________________________________________________________
global_average_pooling2d_1 (Glo (None, 2048) 0 avg_pool[0][0]
__________________________________________________________________________________________________
dropout_1 (Dropout) (None, 2048) 0 global_average_pooling2d_1[0][0]
__________________________________________________________________________________________________
dense_1 (Dense) (None, 1) 2049 dropout_1[0][0]
==================================================================================================
Total params: 23,589,761
Trainable params: 7,884,289
Non-trainable params: 15,705,472
__________________________________________________________________________________________________
# Fine-tune the ResNet50 transfer-learning model; per the captured log below,
# a checkpoint is saved after every epoch. Returns the Keras History object.
result = tl_resnet50.model_fit()
Epoch 1/8 13/13 [==============================] - 38s 3s/step - loss: 0.1707 - acc: 0.9296 - val_loss: 0.2728 - val_acc: 0.9775 Epoch 00001: saving model to resnet50_01_0.2728.hdf5 Epoch 2/8 13/13 [==============================] - 20s 2s/step - loss: 0.0494 - acc: 0.9802 - val_loss: 0.1599 - val_acc: 0.9825 Epoch 00002: saving model to resnet50_02_0.1599.hdf5 Epoch 3/8 13/13 [==============================] - 25s 2s/step - loss: 0.0223 - acc: 0.9946 - val_loss: 0.2629 - val_acc: 0.9675 Epoch 00003: saving model to resnet50_03_0.2629.hdf5 Epoch 4/8 13/13 [==============================] - 26s 2s/step - loss: 0.0120 - acc: 0.9958 - val_loss: 0.1533 - val_acc: 0.9800 Epoch 00004: saving model to resnet50_04_0.1533.hdf5 Epoch 5/8 13/13 [==============================] - 26s 2s/step - loss: 0.0106 - acc: 0.9982 - val_loss: 0.1934 - val_acc: 0.9650 Epoch 00005: saving model to resnet50_05_0.1934.hdf5 Epoch 6/8 13/13 [==============================] - 25s 2s/step - loss: 0.0048 - acc: 0.9982 - val_loss: 0.1859 - val_acc: 0.9600 Epoch 00006: saving model to resnet50_06_0.1859.hdf5 Epoch 7/8 13/13 [==============================] - 25s 2s/step - loss: 0.0112 - acc: 0.9982 - val_loss: 0.0994 - val_acc: 0.9750 Epoch 00007: saving model to resnet50_07_0.0994.hdf5 Epoch 8/8 13/13 [==============================] - 25s 2s/step - loss: 0.0036 - acc: 1.0000 - val_loss: 0.2119 - val_acc: 0.9650 Epoch 00008: saving model to resnet50_08_0.2119.hdf5
# Persist the training history to disk (train_log_resnet50.npz per the log
# output below), reload it, then plot the training curves.
# NOTE: "visualize_trainning" [sic] is the wrapper class's actual method name.
tl_resnet50.save_training_log()
tl_resnet50.load_training_log()
tl_resnet50.visualize_trainning()
Saving trainning history to file: train_log_resnet50.npz Restoring trainning history from file: train_log_resnet50.npz
加载最优模型(选择val_loss最小)
# Select the checkpoint with the lowest validation loss
# (resnet50_07_0.0994.hdf5 per the output below) and load its weights.
best_model = tl_resnet50.find_best_model()
tl_resnet50.load_model(best_model)
The best model is: resnet50_07_0.0994.hdf5
将预测结果输出至csv文件
# Run prediction on the test generator and write the results to a CSV file
# (pred_resnet50.csv per the captured output).
tl_resnet50.model_pred()
tl_resnet50.save_test_csv()
8/8 [==============================] - 15s 2s/step Saving test result on: pred_resnet50.csv
分别针对原始图像和使用数据增强变换后的图像提取高层特征
# Export high-level (global-average-pooled, 2048-d) features to HDF5, once
# without and once with data augmentation (feature_resnet50.h5 and
# feature_enh_resnet50.h5 per the output below).
# NOTE(review): TRAIN_GEN_EX is defined in an earlier cell not shown here —
# presumably a separate 2000-image feature-extraction directory; verify.
tl_resnet50.hl_feature_extractor(TRAIN_GEN_EX, feature_enh=False)
tl_resnet50.hl_feature_extractor(TRAIN_GEN_EX, feature_enh=True)
The output of model: Tensor("global_average_pooling2d_17_1/Mean:0", shape=(?, 2048), dtype=float32)
Non Data augmentation
Found 2000 images belonging to 2 classes.
Gen feature from train data ...
16/16 [==============================] - 28s 2s/step
Gen feature from test data ...
8/8 [==============================] - 10s 1s/step
Write feature to file: feature_resnet50.h5
The output of model: Tensor("global_average_pooling2d_17_1/Mean:0", shape=(?, 2048), dtype=float32)
Data augmentation
Found 2000 images belonging to 2 classes.
Gen feature from train data ...
16/16 [==============================] - 31s 2s/step
Write feature to file: feature_enh_resnet50.h5
# Build the Xception transfer-learning wrapper: 299x299 inputs, fine-tuning
# from layer 32 onward, early stopping after 5 stagnant epochs.
# The *_GEN directory constants are defined earlier in the notebook.
xception_cfg = dict(
    model_name='xception',
    img_sz=(299,299),
    fine_tune_layer=32,
    epochs=8,
    patience=5,
    batch_size=128,
    train_dir=TRAIN_GEN,
    val_dir=VAL_GEN,
    test_dir=TEST_GEN,
)
tl_xception = TransferLearning_CNN(**xception_cfg)
Found 1600 images belonging to 2 classes. Found 400 images belonging to 2 classes. Found 1000 images belonging to 1 classes.
# Print the Keras layer-by-layer summary of the assembled Xception model.
tl_xception.model_summary()
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_13 (InputLayer) (None, 299, 299, 3) 0
__________________________________________________________________________________________________
block1_conv1 (Conv2D) (None, 149, 149, 32) 864 input_13[0][0]
__________________________________________________________________________________________________
block1_conv1_bn (BatchNormaliza (None, 149, 149, 32) 128 block1_conv1[0][0]
__________________________________________________________________________________________________
block1_conv1_act (Activation) (None, 149, 149, 32) 0 block1_conv1_bn[0][0]
__________________________________________________________________________________________________
block1_conv2 (Conv2D) (None, 147, 147, 64) 18432 block1_conv1_act[0][0]
__________________________________________________________________________________________________
block1_conv2_bn (BatchNormaliza (None, 147, 147, 64) 256 block1_conv2[0][0]
__________________________________________________________________________________________________
block1_conv2_act (Activation) (None, 147, 147, 64) 0 block1_conv2_bn[0][0]
__________________________________________________________________________________________________
block2_sepconv1 (SeparableConv2 (None, 147, 147, 128 8768 block1_conv2_act[0][0]
__________________________________________________________________________________________________
block2_sepconv1_bn (BatchNormal (None, 147, 147, 128 512 block2_sepconv1[0][0]
__________________________________________________________________________________________________
block2_sepconv2_act (Activation (None, 147, 147, 128 0 block2_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block2_sepconv2 (SeparableConv2 (None, 147, 147, 128 17536 block2_sepconv2_act[0][0]
__________________________________________________________________________________________________
block2_sepconv2_bn (BatchNormal (None, 147, 147, 128 512 block2_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_1 (Conv2D) (None, 74, 74, 128) 8192 block1_conv2_act[0][0]
__________________________________________________________________________________________________
block2_pool (MaxPooling2D) (None, 74, 74, 128) 0 block2_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 74, 74, 128) 512 conv2d_1[0][0]
__________________________________________________________________________________________________
add_193 (Add) (None, 74, 74, 128) 0 block2_pool[0][0]
batch_normalization_1[0][0]
__________________________________________________________________________________________________
block3_sepconv1_act (Activation (None, 74, 74, 128) 0 add_193[0][0]
__________________________________________________________________________________________________
block3_sepconv1 (SeparableConv2 (None, 74, 74, 256) 33920 block3_sepconv1_act[0][0]
__________________________________________________________________________________________________
block3_sepconv1_bn (BatchNormal (None, 74, 74, 256) 1024 block3_sepconv1[0][0]
__________________________________________________________________________________________________
block3_sepconv2_act (Activation (None, 74, 74, 256) 0 block3_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block3_sepconv2 (SeparableConv2 (None, 74, 74, 256) 67840 block3_sepconv2_act[0][0]
__________________________________________________________________________________________________
block3_sepconv2_bn (BatchNormal (None, 74, 74, 256) 1024 block3_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_2 (Conv2D) (None, 37, 37, 256) 32768 add_193[0][0]
__________________________________________________________________________________________________
block3_pool (MaxPooling2D) (None, 37, 37, 256) 0 block3_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 37, 37, 256) 1024 conv2d_2[0][0]
__________________________________________________________________________________________________
add_194 (Add) (None, 37, 37, 256) 0 block3_pool[0][0]
batch_normalization_2[0][0]
__________________________________________________________________________________________________
block4_sepconv1_act (Activation (None, 37, 37, 256) 0 add_194[0][0]
__________________________________________________________________________________________________
block4_sepconv1 (SeparableConv2 (None, 37, 37, 728) 188672 block4_sepconv1_act[0][0]
__________________________________________________________________________________________________
block4_sepconv1_bn (BatchNormal (None, 37, 37, 728) 2912 block4_sepconv1[0][0]
__________________________________________________________________________________________________
block4_sepconv2_act (Activation (None, 37, 37, 728) 0 block4_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block4_sepconv2 (SeparableConv2 (None, 37, 37, 728) 536536 block4_sepconv2_act[0][0]
__________________________________________________________________________________________________
block4_sepconv2_bn (BatchNormal (None, 37, 37, 728) 2912 block4_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_3 (Conv2D) (None, 19, 19, 728) 186368 add_194[0][0]
__________________________________________________________________________________________________
block4_pool (MaxPooling2D) (None, 19, 19, 728) 0 block4_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 19, 19, 728) 2912 conv2d_3[0][0]
__________________________________________________________________________________________________
add_195 (Add) (None, 19, 19, 728) 0 block4_pool[0][0]
batch_normalization_3[0][0]
__________________________________________________________________________________________________
block5_sepconv1_act (Activation (None, 19, 19, 728) 0 add_195[0][0]
__________________________________________________________________________________________________
block5_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block5_sepconv1_act[0][0]
__________________________________________________________________________________________________
block5_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block5_sepconv1[0][0]
__________________________________________________________________________________________________
block5_sepconv2_act (Activation (None, 19, 19, 728) 0 block5_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block5_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block5_sepconv2_act[0][0]
__________________________________________________________________________________________________
block5_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block5_sepconv2[0][0]
__________________________________________________________________________________________________
block5_sepconv3_act (Activation (None, 19, 19, 728) 0 block5_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block5_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block5_sepconv3_act[0][0]
__________________________________________________________________________________________________
block5_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block5_sepconv3[0][0]
__________________________________________________________________________________________________
add_196 (Add) (None, 19, 19, 728) 0 block5_sepconv3_bn[0][0]
add_195[0][0]
__________________________________________________________________________________________________
block6_sepconv1_act (Activation (None, 19, 19, 728) 0 add_196[0][0]
__________________________________________________________________________________________________
block6_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block6_sepconv1_act[0][0]
__________________________________________________________________________________________________
block6_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block6_sepconv1[0][0]
__________________________________________________________________________________________________
block6_sepconv2_act (Activation (None, 19, 19, 728) 0 block6_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block6_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block6_sepconv2_act[0][0]
__________________________________________________________________________________________________
block6_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block6_sepconv2[0][0]
__________________________________________________________________________________________________
block6_sepconv3_act (Activation (None, 19, 19, 728) 0 block6_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block6_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block6_sepconv3_act[0][0]
__________________________________________________________________________________________________
block6_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block6_sepconv3[0][0]
__________________________________________________________________________________________________
add_197 (Add) (None, 19, 19, 728) 0 block6_sepconv3_bn[0][0]
add_196[0][0]
__________________________________________________________________________________________________
block7_sepconv1_act (Activation (None, 19, 19, 728) 0 add_197[0][0]
__________________________________________________________________________________________________
block7_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block7_sepconv1_act[0][0]
__________________________________________________________________________________________________
block7_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block7_sepconv1[0][0]
__________________________________________________________________________________________________
block7_sepconv2_act (Activation (None, 19, 19, 728) 0 block7_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block7_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block7_sepconv2_act[0][0]
__________________________________________________________________________________________________
block7_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block7_sepconv2[0][0]
__________________________________________________________________________________________________
block7_sepconv3_act (Activation (None, 19, 19, 728) 0 block7_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block7_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block7_sepconv3_act[0][0]
__________________________________________________________________________________________________
block7_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block7_sepconv3[0][0]
__________________________________________________________________________________________________
add_198 (Add) (None, 19, 19, 728) 0 block7_sepconv3_bn[0][0]
add_197[0][0]
__________________________________________________________________________________________________
block8_sepconv1_act (Activation (None, 19, 19, 728) 0 add_198[0][0]
__________________________________________________________________________________________________
block8_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block8_sepconv1_act[0][0]
__________________________________________________________________________________________________
block8_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block8_sepconv1[0][0]
__________________________________________________________________________________________________
block8_sepconv2_act (Activation (None, 19, 19, 728) 0 block8_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block8_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block8_sepconv2_act[0][0]
__________________________________________________________________________________________________
block8_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block8_sepconv2[0][0]
__________________________________________________________________________________________________
block8_sepconv3_act (Activation (None, 19, 19, 728) 0 block8_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block8_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block8_sepconv3_act[0][0]
__________________________________________________________________________________________________
block8_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block8_sepconv3[0][0]
__________________________________________________________________________________________________
add_199 (Add) (None, 19, 19, 728) 0 block8_sepconv3_bn[0][0]
add_198[0][0]
__________________________________________________________________________________________________
block9_sepconv1_act (Activation (None, 19, 19, 728) 0 add_199[0][0]
__________________________________________________________________________________________________
block9_sepconv1 (SeparableConv2 (None, 19, 19, 728) 536536 block9_sepconv1_act[0][0]
__________________________________________________________________________________________________
block9_sepconv1_bn (BatchNormal (None, 19, 19, 728) 2912 block9_sepconv1[0][0]
__________________________________________________________________________________________________
block9_sepconv2_act (Activation (None, 19, 19, 728) 0 block9_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block9_sepconv2 (SeparableConv2 (None, 19, 19, 728) 536536 block9_sepconv2_act[0][0]
__________________________________________________________________________________________________
block9_sepconv2_bn (BatchNormal (None, 19, 19, 728) 2912 block9_sepconv2[0][0]
__________________________________________________________________________________________________
block9_sepconv3_act (Activation (None, 19, 19, 728) 0 block9_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block9_sepconv3 (SeparableConv2 (None, 19, 19, 728) 536536 block9_sepconv3_act[0][0]
__________________________________________________________________________________________________
block9_sepconv3_bn (BatchNormal (None, 19, 19, 728) 2912 block9_sepconv3[0][0]
__________________________________________________________________________________________________
add_200 (Add) (None, 19, 19, 728) 0 block9_sepconv3_bn[0][0]
add_199[0][0]
__________________________________________________________________________________________________
block10_sepconv1_act (Activatio (None, 19, 19, 728) 0 add_200[0][0]
__________________________________________________________________________________________________
block10_sepconv1 (SeparableConv (None, 19, 19, 728) 536536 block10_sepconv1_act[0][0]
__________________________________________________________________________________________________
block10_sepconv1_bn (BatchNorma (None, 19, 19, 728) 2912 block10_sepconv1[0][0]
__________________________________________________________________________________________________
block10_sepconv2_act (Activatio (None, 19, 19, 728) 0 block10_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block10_sepconv2 (SeparableConv (None, 19, 19, 728) 536536 block10_sepconv2_act[0][0]
__________________________________________________________________________________________________
block10_sepconv2_bn (BatchNorma (None, 19, 19, 728) 2912 block10_sepconv2[0][0]
__________________________________________________________________________________________________
block10_sepconv3_act (Activatio (None, 19, 19, 728) 0 block10_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block10_sepconv3 (SeparableConv (None, 19, 19, 728) 536536 block10_sepconv3_act[0][0]
__________________________________________________________________________________________________
block10_sepconv3_bn (BatchNorma (None, 19, 19, 728) 2912 block10_sepconv3[0][0]
__________________________________________________________________________________________________
add_201 (Add) (None, 19, 19, 728) 0 block10_sepconv3_bn[0][0]
add_200[0][0]
__________________________________________________________________________________________________
block11_sepconv1_act (Activatio (None, 19, 19, 728) 0 add_201[0][0]
__________________________________________________________________________________________________
block11_sepconv1 (SeparableConv (None, 19, 19, 728) 536536 block11_sepconv1_act[0][0]
__________________________________________________________________________________________________
block11_sepconv1_bn (BatchNorma (None, 19, 19, 728) 2912 block11_sepconv1[0][0]
__________________________________________________________________________________________________
block11_sepconv2_act (Activatio (None, 19, 19, 728) 0 block11_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block11_sepconv2 (SeparableConv (None, 19, 19, 728) 536536 block11_sepconv2_act[0][0]
__________________________________________________________________________________________________
block11_sepconv2_bn (BatchNorma (None, 19, 19, 728) 2912 block11_sepconv2[0][0]
__________________________________________________________________________________________________
block11_sepconv3_act (Activatio (None, 19, 19, 728) 0 block11_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block11_sepconv3 (SeparableConv (None, 19, 19, 728) 536536 block11_sepconv3_act[0][0]
__________________________________________________________________________________________________
block11_sepconv3_bn (BatchNorma (None, 19, 19, 728) 2912 block11_sepconv3[0][0]
__________________________________________________________________________________________________
add_202 (Add) (None, 19, 19, 728) 0 block11_sepconv3_bn[0][0]
add_201[0][0]
__________________________________________________________________________________________________
block12_sepconv1_act (Activatio (None, 19, 19, 728) 0 add_202[0][0]
__________________________________________________________________________________________________
block12_sepconv1 (SeparableConv (None, 19, 19, 728) 536536 block12_sepconv1_act[0][0]
__________________________________________________________________________________________________
block12_sepconv1_bn (BatchNorma (None, 19, 19, 728) 2912 block12_sepconv1[0][0]
__________________________________________________________________________________________________
block12_sepconv2_act (Activatio (None, 19, 19, 728) 0 block12_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block12_sepconv2 (SeparableConv (None, 19, 19, 728) 536536 block12_sepconv2_act[0][0]
__________________________________________________________________________________________________
block12_sepconv2_bn (BatchNorma (None, 19, 19, 728) 2912 block12_sepconv2[0][0]
__________________________________________________________________________________________________
block12_sepconv3_act (Activatio (None, 19, 19, 728) 0 block12_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block12_sepconv3 (SeparableConv (None, 19, 19, 728) 536536 block12_sepconv3_act[0][0]
__________________________________________________________________________________________________
block12_sepconv3_bn (BatchNorma (None, 19, 19, 728) 2912 block12_sepconv3[0][0]
__________________________________________________________________________________________________
add_203 (Add) (None, 19, 19, 728) 0 block12_sepconv3_bn[0][0]
add_202[0][0]
__________________________________________________________________________________________________
block13_sepconv1_act (Activatio (None, 19, 19, 728) 0 add_203[0][0]
__________________________________________________________________________________________________
block13_sepconv1 (SeparableConv (None, 19, 19, 728) 536536 block13_sepconv1_act[0][0]
__________________________________________________________________________________________________
block13_sepconv1_bn (BatchNorma (None, 19, 19, 728) 2912 block13_sepconv1[0][0]
__________________________________________________________________________________________________
block13_sepconv2_act (Activatio (None, 19, 19, 728) 0 block13_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block13_sepconv2 (SeparableConv (None, 19, 19, 1024) 752024 block13_sepconv2_act[0][0]
__________________________________________________________________________________________________
block13_sepconv2_bn (BatchNorma (None, 19, 19, 1024) 4096 block13_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_4 (Conv2D) (None, 10, 10, 1024) 745472 add_203[0][0]
__________________________________________________________________________________________________
block13_pool (MaxPooling2D) (None, 10, 10, 1024) 0 block13_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 10, 10, 1024) 4096 conv2d_4[0][0]
__________________________________________________________________________________________________
add_204 (Add) (None, 10, 10, 1024) 0 block13_pool[0][0]
batch_normalization_4[0][0]
__________________________________________________________________________________________________
block14_sepconv1 (SeparableConv (None, 10, 10, 1536) 1582080 add_204[0][0]
__________________________________________________________________________________________________
block14_sepconv1_bn (BatchNorma (None, 10, 10, 1536) 6144 block14_sepconv1[0][0]
__________________________________________________________________________________________________
block14_sepconv1_act (Activatio (None, 10, 10, 1536) 0 block14_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block14_sepconv2 (SeparableConv (None, 10, 10, 2048) 3159552 block14_sepconv1_act[0][0]
__________________________________________________________________________________________________
block14_sepconv2_bn (BatchNorma (None, 10, 10, 2048) 8192 block14_sepconv2[0][0]
__________________________________________________________________________________________________
block14_sepconv2_act (Activatio (None, 10, 10, 2048) 0 block14_sepconv2_bn[0][0]
__________________________________________________________________________________________________
global_average_pooling2d_13 (Gl (None, 2048) 0 block14_sepconv2_act[0][0]
__________________________________________________________________________________________________
dropout_13 (Dropout) (None, 2048) 0 global_average_pooling2d_13[0][0]
__________________________________________________________________________________________________
dense_13 (Dense) (None, 1) 2049 dropout_13[0][0]
==================================================================================================
Total params: 20,863,529
Trainable params: 9,480,393
Non-trainable params: 11,383,136
__________________________________________________________________________________________________
# Fine-tune the Xception model; the log below shows per-epoch checkpointing
# and early stopping at epoch 5. Returns the Keras History object.
result = tl_xception.model_fit()
Epoch 1/8 13/13 [==============================] - 67s 5s/step - loss: 0.1383 - acc: 0.9393 - val_loss: 0.0296 - val_acc: 0.9950 Epoch 00001: saving model to xception_01_0.0296.hdf5 Epoch 2/8 13/13 [==============================] - 50s 4s/step - loss: 0.0210 - acc: 0.9934 - val_loss: 0.0429 - val_acc: 0.9900 Epoch 00002: saving model to xception_02_0.0429.hdf5 Epoch 3/8 13/13 [==============================] - 50s 4s/step - loss: 0.0096 - acc: 0.9964 - val_loss: 0.0381 - val_acc: 0.9900 Epoch 00003: saving model to xception_03_0.0381.hdf5 Epoch 4/8 13/13 [==============================] - 50s 4s/step - loss: 0.0072 - acc: 0.9970 - val_loss: 0.0441 - val_acc: 0.9825 Epoch 00004: saving model to xception_04_0.0441.hdf5 Epoch 5/8 13/13 [==============================] - 50s 4s/step - loss: 0.0076 - acc: 0.9976 - val_loss: 0.0428 - val_acc: 0.9925 Epoch 00005: saving model to xception_05_0.0428.hdf5 Epoch 00005: early stopping
# Persist the training history (train_log_xception.npz per the output below),
# reload it, then plot the training curves.
# NOTE: "visualize_trainning" [sic] is the wrapper class's actual method name.
tl_xception.save_training_log()
tl_xception.load_training_log()
tl_xception.visualize_trainning()
Saving trainning history to file: train_log_xception.npz Restoring trainning history from file: train_log_xception.npz
加载最优模型(val_loss最小)
# Select the checkpoint with the lowest validation loss
# (xception_01_0.0296.hdf5 per the output below) and load its weights.
best_model = tl_xception.find_best_model()
tl_xception.load_model(best_model)
The best model is: xception_01_0.0296.hdf5
将预测结果输出至csv文件
# Run prediction on the test generator and write the results to a CSV file
# (pred_xception.csv per the captured output).
tl_xception.model_pred()
tl_xception.save_test_csv()
8/8 [==============================] - 27s 3s/step Saving test result on: pred_xception.csv
分别针对原始图像和使用数据增强变换后的图像提取高层特征
# Export high-level (global-average-pooled, 2048-d) features to HDF5, once
# without and once with data augmentation (feature_xception.h5 and
# feature_enh_xception.h5 per the output below).
# NOTE(review): TRAIN_GEN_EX is defined in an earlier cell not shown here —
# presumably a separate 2000-image feature-extraction directory; verify.
tl_xception.hl_feature_extractor(TRAIN_GEN_EX, feature_enh=False)
tl_xception.hl_feature_extractor(TRAIN_GEN_EX, feature_enh=True)
The output of model: Tensor("global_average_pooling2d_13_1/Mean:0", shape=(?, 2048), dtype=float32)
Non Data augmentation
Found 2000 images belonging to 2 classes.
Gen feature from train data ...
16/16 [==============================] - 48s 3s/step
Gen feature from test data ...
8/8 [==============================] - 22s 3s/step
Write feature to file: feature_xception.h5
The output of model: Tensor("global_average_pooling2d_13_1/Mean:0", shape=(?, 2048), dtype=float32)
Data augmentation
Found 2000 images belonging to 2 classes.
Gen feature from train data ...
16/16 [==============================] - 52s 3s/step
Write feature to file: feature_enh_xception.h5
# Build a second transfer-learning model, this time on InceptionResNetV2
# (299x299 input), fine-tuning from layer 80 onward; trains for up to 8
# epochs with early-stopping patience of 4 and batch size 128.
# NOTE(review): the variable name spells "restnet" (likely a typo for
# "resnet"), but it is used consistently below, so it is left as-is.
tl_inception_restnet_v2 = TransferLearning_CNN(
train_dir=TRAIN_GEN,
val_dir=VAL_GEN,
test_dir=TEST_GEN,
model_name='inception_res_v2',
fine_tune_layer=80,
epochs=8,
patience=4,
batch_size=128,
img_sz=(299,299))
Found 1600 images belonging to 2 classes. Found 400 images belonging to 2 classes. Found 1000 images belonging to 1 classes. Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.7/inception_resnet_v2_weights_tf_dim_ordering_tf_kernels_notop.h5 219062272/219055592 [==============================] - 3s 0us/step
# Print the layer-by-layer Keras summary of the InceptionResNetV2-based
# network (the long table that follows is its output).
tl_inception_restnet_v2.model_summary()
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_14 (InputLayer) (None, 299, 299, 3) 0
__________________________________________________________________________________________________
conv2d_5 (Conv2D) (None, 149, 149, 32) 864 input_14[0][0]
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 149, 149, 32) 96 conv2d_5[0][0]
__________________________________________________________________________________________________
activation_589 (Activation) (None, 149, 149, 32) 0 batch_normalization_5[0][0]
__________________________________________________________________________________________________
conv2d_6 (Conv2D) (None, 147, 147, 32) 9216 activation_589[0][0]
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 147, 147, 32) 96 conv2d_6[0][0]
__________________________________________________________________________________________________
activation_590 (Activation) (None, 147, 147, 32) 0 batch_normalization_6[0][0]
__________________________________________________________________________________________________
conv2d_7 (Conv2D) (None, 147, 147, 64) 18432 activation_590[0][0]
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 147, 147, 64) 192 conv2d_7[0][0]
__________________________________________________________________________________________________
activation_591 (Activation) (None, 147, 147, 64) 0 batch_normalization_7[0][0]
__________________________________________________________________________________________________
max_pooling2d_13 (MaxPooling2D) (None, 73, 73, 64) 0 activation_591[0][0]
__________________________________________________________________________________________________
conv2d_8 (Conv2D) (None, 73, 73, 80) 5120 max_pooling2d_13[0][0]
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 73, 73, 80) 240 conv2d_8[0][0]
__________________________________________________________________________________________________
activation_592 (Activation) (None, 73, 73, 80) 0 batch_normalization_8[0][0]
__________________________________________________________________________________________________
conv2d_9 (Conv2D) (None, 71, 71, 192) 138240 activation_592[0][0]
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 71, 71, 192) 576 conv2d_9[0][0]
__________________________________________________________________________________________________
activation_593 (Activation) (None, 71, 71, 192) 0 batch_normalization_9[0][0]
__________________________________________________________________________________________________
max_pooling2d_14 (MaxPooling2D) (None, 35, 35, 192) 0 activation_593[0][0]
__________________________________________________________________________________________________
conv2d_13 (Conv2D) (None, 35, 35, 64) 12288 max_pooling2d_14[0][0]
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 35, 35, 64) 192 conv2d_13[0][0]
__________________________________________________________________________________________________
activation_597 (Activation) (None, 35, 35, 64) 0 batch_normalization_13[0][0]
__________________________________________________________________________________________________
conv2d_11 (Conv2D) (None, 35, 35, 48) 9216 max_pooling2d_14[0][0]
__________________________________________________________________________________________________
conv2d_14 (Conv2D) (None, 35, 35, 96) 55296 activation_597[0][0]
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 35, 35, 48) 144 conv2d_11[0][0]
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 35, 35, 96) 288 conv2d_14[0][0]
__________________________________________________________________________________________________
activation_595 (Activation) (None, 35, 35, 48) 0 batch_normalization_11[0][0]
__________________________________________________________________________________________________
activation_598 (Activation) (None, 35, 35, 96) 0 batch_normalization_14[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 35, 35, 192) 0 max_pooling2d_14[0][0]
__________________________________________________________________________________________________
conv2d_10 (Conv2D) (None, 35, 35, 96) 18432 max_pooling2d_14[0][0]
__________________________________________________________________________________________________
conv2d_12 (Conv2D) (None, 35, 35, 64) 76800 activation_595[0][0]
__________________________________________________________________________________________________
conv2d_15 (Conv2D) (None, 35, 35, 96) 82944 activation_598[0][0]
__________________________________________________________________________________________________
conv2d_16 (Conv2D) (None, 35, 35, 64) 12288 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 35, 35, 96) 288 conv2d_10[0][0]
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 35, 35, 64) 192 conv2d_12[0][0]
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 35, 35, 96) 288 conv2d_15[0][0]
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 35, 35, 64) 192 conv2d_16[0][0]
__________________________________________________________________________________________________
activation_594 (Activation) (None, 35, 35, 96) 0 batch_normalization_10[0][0]
__________________________________________________________________________________________________
activation_596 (Activation) (None, 35, 35, 64) 0 batch_normalization_12[0][0]
__________________________________________________________________________________________________
activation_599 (Activation) (None, 35, 35, 96) 0 batch_normalization_15[0][0]
__________________________________________________________________________________________________
activation_600 (Activation) (None, 35, 35, 64) 0 batch_normalization_16[0][0]
__________________________________________________________________________________________________
mixed_5b (Concatenate) (None, 35, 35, 320) 0 activation_594[0][0]
activation_596[0][0]
activation_599[0][0]
activation_600[0][0]
__________________________________________________________________________________________________
conv2d_20 (Conv2D) (None, 35, 35, 32) 10240 mixed_5b[0][0]
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 35, 35, 32) 96 conv2d_20[0][0]
__________________________________________________________________________________________________
activation_604 (Activation) (None, 35, 35, 32) 0 batch_normalization_20[0][0]
__________________________________________________________________________________________________
conv2d_18 (Conv2D) (None, 35, 35, 32) 10240 mixed_5b[0][0]
__________________________________________________________________________________________________
conv2d_21 (Conv2D) (None, 35, 35, 48) 13824 activation_604[0][0]
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 35, 35, 32) 96 conv2d_18[0][0]
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 35, 35, 48) 144 conv2d_21[0][0]
__________________________________________________________________________________________________
activation_602 (Activation) (None, 35, 35, 32) 0 batch_normalization_18[0][0]
__________________________________________________________________________________________________
activation_605 (Activation) (None, 35, 35, 48) 0 batch_normalization_21[0][0]
__________________________________________________________________________________________________
conv2d_17 (Conv2D) (None, 35, 35, 32) 10240 mixed_5b[0][0]
__________________________________________________________________________________________________
conv2d_19 (Conv2D) (None, 35, 35, 32) 9216 activation_602[0][0]
__________________________________________________________________________________________________
conv2d_22 (Conv2D) (None, 35, 35, 64) 27648 activation_605[0][0]
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 35, 35, 32) 96 conv2d_17[0][0]
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 35, 35, 32) 96 conv2d_19[0][0]
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 35, 35, 64) 192 conv2d_22[0][0]
__________________________________________________________________________________________________
activation_601 (Activation) (None, 35, 35, 32) 0 batch_normalization_17[0][0]
__________________________________________________________________________________________________
activation_603 (Activation) (None, 35, 35, 32) 0 batch_normalization_19[0][0]
__________________________________________________________________________________________________
activation_606 (Activation) (None, 35, 35, 64) 0 batch_normalization_22[0][0]
__________________________________________________________________________________________________
block35_1_mixed (Concatenate) (None, 35, 35, 128) 0 activation_601[0][0]
activation_603[0][0]
activation_606[0][0]
__________________________________________________________________________________________________
block35_1_conv (Conv2D) (None, 35, 35, 320) 41280 block35_1_mixed[0][0]
__________________________________________________________________________________________________
block35_1 (Lambda) (None, 35, 35, 320) 0 mixed_5b[0][0]
block35_1_conv[0][0]
__________________________________________________________________________________________________
block35_1_ac (Activation) (None, 35, 35, 320) 0 block35_1[0][0]
__________________________________________________________________________________________________
conv2d_26 (Conv2D) (None, 35, 35, 32) 10240 block35_1_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 35, 35, 32) 96 conv2d_26[0][0]
__________________________________________________________________________________________________
activation_610 (Activation) (None, 35, 35, 32) 0 batch_normalization_26[0][0]
__________________________________________________________________________________________________
conv2d_24 (Conv2D) (None, 35, 35, 32) 10240 block35_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_27 (Conv2D) (None, 35, 35, 48) 13824 activation_610[0][0]
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 35, 35, 32) 96 conv2d_24[0][0]
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 35, 35, 48) 144 conv2d_27[0][0]
__________________________________________________________________________________________________
activation_608 (Activation) (None, 35, 35, 32) 0 batch_normalization_24[0][0]
__________________________________________________________________________________________________
activation_611 (Activation) (None, 35, 35, 48) 0 batch_normalization_27[0][0]
__________________________________________________________________________________________________
conv2d_23 (Conv2D) (None, 35, 35, 32) 10240 block35_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_25 (Conv2D) (None, 35, 35, 32) 9216 activation_608[0][0]
__________________________________________________________________________________________________
conv2d_28 (Conv2D) (None, 35, 35, 64) 27648 activation_611[0][0]
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 35, 35, 32) 96 conv2d_23[0][0]
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 35, 35, 32) 96 conv2d_25[0][0]
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 35, 35, 64) 192 conv2d_28[0][0]
__________________________________________________________________________________________________
activation_607 (Activation) (None, 35, 35, 32) 0 batch_normalization_23[0][0]
__________________________________________________________________________________________________
activation_609 (Activation) (None, 35, 35, 32) 0 batch_normalization_25[0][0]
__________________________________________________________________________________________________
activation_612 (Activation) (None, 35, 35, 64) 0 batch_normalization_28[0][0]
__________________________________________________________________________________________________
block35_2_mixed (Concatenate) (None, 35, 35, 128) 0 activation_607[0][0]
activation_609[0][0]
activation_612[0][0]
__________________________________________________________________________________________________
block35_2_conv (Conv2D) (None, 35, 35, 320) 41280 block35_2_mixed[0][0]
__________________________________________________________________________________________________
block35_2 (Lambda) (None, 35, 35, 320) 0 block35_1_ac[0][0]
block35_2_conv[0][0]
__________________________________________________________________________________________________
block35_2_ac (Activation) (None, 35, 35, 320) 0 block35_2[0][0]
__________________________________________________________________________________________________
conv2d_32 (Conv2D) (None, 35, 35, 32) 10240 block35_2_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 35, 35, 32) 96 conv2d_32[0][0]
__________________________________________________________________________________________________
activation_616 (Activation) (None, 35, 35, 32) 0 batch_normalization_32[0][0]
__________________________________________________________________________________________________
conv2d_30 (Conv2D) (None, 35, 35, 32) 10240 block35_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_33 (Conv2D) (None, 35, 35, 48) 13824 activation_616[0][0]
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 35, 35, 32) 96 conv2d_30[0][0]
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 35, 35, 48) 144 conv2d_33[0][0]
__________________________________________________________________________________________________
activation_614 (Activation) (None, 35, 35, 32) 0 batch_normalization_30[0][0]
__________________________________________________________________________________________________
activation_617 (Activation) (None, 35, 35, 48) 0 batch_normalization_33[0][0]
__________________________________________________________________________________________________
conv2d_29 (Conv2D) (None, 35, 35, 32) 10240 block35_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_31 (Conv2D) (None, 35, 35, 32) 9216 activation_614[0][0]
__________________________________________________________________________________________________
conv2d_34 (Conv2D) (None, 35, 35, 64) 27648 activation_617[0][0]
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 35, 35, 32) 96 conv2d_29[0][0]
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 35, 35, 32) 96 conv2d_31[0][0]
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 35, 35, 64) 192 conv2d_34[0][0]
__________________________________________________________________________________________________
activation_613 (Activation) (None, 35, 35, 32) 0 batch_normalization_29[0][0]
__________________________________________________________________________________________________
activation_615 (Activation) (None, 35, 35, 32) 0 batch_normalization_31[0][0]
__________________________________________________________________________________________________
activation_618 (Activation) (None, 35, 35, 64) 0 batch_normalization_34[0][0]
__________________________________________________________________________________________________
block35_3_mixed (Concatenate) (None, 35, 35, 128) 0 activation_613[0][0]
activation_615[0][0]
activation_618[0][0]
__________________________________________________________________________________________________
block35_3_conv (Conv2D) (None, 35, 35, 320) 41280 block35_3_mixed[0][0]
__________________________________________________________________________________________________
block35_3 (Lambda) (None, 35, 35, 320) 0 block35_2_ac[0][0]
block35_3_conv[0][0]
__________________________________________________________________________________________________
block35_3_ac (Activation) (None, 35, 35, 320) 0 block35_3[0][0]
__________________________________________________________________________________________________
conv2d_38 (Conv2D) (None, 35, 35, 32) 10240 block35_3_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 35, 35, 32) 96 conv2d_38[0][0]
__________________________________________________________________________________________________
activation_622 (Activation) (None, 35, 35, 32) 0 batch_normalization_38[0][0]
__________________________________________________________________________________________________
conv2d_36 (Conv2D) (None, 35, 35, 32) 10240 block35_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_39 (Conv2D) (None, 35, 35, 48) 13824 activation_622[0][0]
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 35, 35, 32) 96 conv2d_36[0][0]
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 35, 35, 48) 144 conv2d_39[0][0]
__________________________________________________________________________________________________
activation_620 (Activation) (None, 35, 35, 32) 0 batch_normalization_36[0][0]
__________________________________________________________________________________________________
activation_623 (Activation) (None, 35, 35, 48) 0 batch_normalization_39[0][0]
__________________________________________________________________________________________________
conv2d_35 (Conv2D) (None, 35, 35, 32) 10240 block35_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_37 (Conv2D) (None, 35, 35, 32) 9216 activation_620[0][0]
__________________________________________________________________________________________________
conv2d_40 (Conv2D) (None, 35, 35, 64) 27648 activation_623[0][0]
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 35, 35, 32) 96 conv2d_35[0][0]
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 35, 35, 32) 96 conv2d_37[0][0]
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 35, 35, 64) 192 conv2d_40[0][0]
__________________________________________________________________________________________________
activation_619 (Activation) (None, 35, 35, 32) 0 batch_normalization_35[0][0]
__________________________________________________________________________________________________
activation_621 (Activation) (None, 35, 35, 32) 0 batch_normalization_37[0][0]
__________________________________________________________________________________________________
activation_624 (Activation) (None, 35, 35, 64) 0 batch_normalization_40[0][0]
__________________________________________________________________________________________________
block35_4_mixed (Concatenate) (None, 35, 35, 128) 0 activation_619[0][0]
activation_621[0][0]
activation_624[0][0]
__________________________________________________________________________________________________
block35_4_conv (Conv2D) (None, 35, 35, 320) 41280 block35_4_mixed[0][0]
__________________________________________________________________________________________________
block35_4 (Lambda) (None, 35, 35, 320) 0 block35_3_ac[0][0]
block35_4_conv[0][0]
__________________________________________________________________________________________________
block35_4_ac (Activation) (None, 35, 35, 320) 0 block35_4[0][0]
__________________________________________________________________________________________________
conv2d_44 (Conv2D) (None, 35, 35, 32) 10240 block35_4_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 35, 35, 32) 96 conv2d_44[0][0]
__________________________________________________________________________________________________
activation_628 (Activation) (None, 35, 35, 32) 0 batch_normalization_44[0][0]
__________________________________________________________________________________________________
conv2d_42 (Conv2D) (None, 35, 35, 32) 10240 block35_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_45 (Conv2D) (None, 35, 35, 48) 13824 activation_628[0][0]
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 35, 35, 32) 96 conv2d_42[0][0]
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 35, 35, 48) 144 conv2d_45[0][0]
__________________________________________________________________________________________________
activation_626 (Activation) (None, 35, 35, 32) 0 batch_normalization_42[0][0]
__________________________________________________________________________________________________
activation_629 (Activation) (None, 35, 35, 48) 0 batch_normalization_45[0][0]
__________________________________________________________________________________________________
conv2d_41 (Conv2D) (None, 35, 35, 32) 10240 block35_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_43 (Conv2D) (None, 35, 35, 32) 9216 activation_626[0][0]
__________________________________________________________________________________________________
conv2d_46 (Conv2D) (None, 35, 35, 64) 27648 activation_629[0][0]
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 35, 35, 32) 96 conv2d_41[0][0]
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 35, 35, 32) 96 conv2d_43[0][0]
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 35, 35, 64) 192 conv2d_46[0][0]
__________________________________________________________________________________________________
activation_625 (Activation) (None, 35, 35, 32) 0 batch_normalization_41[0][0]
__________________________________________________________________________________________________
activation_627 (Activation) (None, 35, 35, 32) 0 batch_normalization_43[0][0]
__________________________________________________________________________________________________
activation_630 (Activation) (None, 35, 35, 64) 0 batch_normalization_46[0][0]
__________________________________________________________________________________________________
block35_5_mixed (Concatenate) (None, 35, 35, 128) 0 activation_625[0][0]
activation_627[0][0]
activation_630[0][0]
__________________________________________________________________________________________________
block35_5_conv (Conv2D) (None, 35, 35, 320) 41280 block35_5_mixed[0][0]
__________________________________________________________________________________________________
block35_5 (Lambda) (None, 35, 35, 320) 0 block35_4_ac[0][0]
block35_5_conv[0][0]
__________________________________________________________________________________________________
block35_5_ac (Activation) (None, 35, 35, 320) 0 block35_5[0][0]
__________________________________________________________________________________________________
conv2d_50 (Conv2D) (None, 35, 35, 32) 10240 block35_5_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 35, 35, 32) 96 conv2d_50[0][0]
__________________________________________________________________________________________________
activation_634 (Activation) (None, 35, 35, 32) 0 batch_normalization_50[0][0]
__________________________________________________________________________________________________
conv2d_48 (Conv2D) (None, 35, 35, 32) 10240 block35_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_51 (Conv2D) (None, 35, 35, 48) 13824 activation_634[0][0]
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 35, 35, 32) 96 conv2d_48[0][0]
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 35, 35, 48) 144 conv2d_51[0][0]
__________________________________________________________________________________________________
activation_632 (Activation) (None, 35, 35, 32) 0 batch_normalization_48[0][0]
__________________________________________________________________________________________________
activation_635 (Activation) (None, 35, 35, 48) 0 batch_normalization_51[0][0]
__________________________________________________________________________________________________
conv2d_47 (Conv2D) (None, 35, 35, 32) 10240 block35_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_49 (Conv2D) (None, 35, 35, 32) 9216 activation_632[0][0]
__________________________________________________________________________________________________
conv2d_52 (Conv2D) (None, 35, 35, 64) 27648 activation_635[0][0]
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 35, 35, 32) 96 conv2d_47[0][0]
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 35, 35, 32) 96 conv2d_49[0][0]
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 35, 35, 64) 192 conv2d_52[0][0]
__________________________________________________________________________________________________
activation_631 (Activation) (None, 35, 35, 32) 0 batch_normalization_47[0][0]
__________________________________________________________________________________________________
activation_633 (Activation) (None, 35, 35, 32) 0 batch_normalization_49[0][0]
__________________________________________________________________________________________________
activation_636 (Activation) (None, 35, 35, 64) 0 batch_normalization_52[0][0]
__________________________________________________________________________________________________
block35_6_mixed (Concatenate) (None, 35, 35, 128) 0 activation_631[0][0]
activation_633[0][0]
activation_636[0][0]
__________________________________________________________________________________________________
block35_6_conv (Conv2D) (None, 35, 35, 320) 41280 block35_6_mixed[0][0]
__________________________________________________________________________________________________
block35_6 (Lambda) (None, 35, 35, 320) 0 block35_5_ac[0][0]
block35_6_conv[0][0]
__________________________________________________________________________________________________
block35_6_ac (Activation) (None, 35, 35, 320) 0 block35_6[0][0]
__________________________________________________________________________________________________
conv2d_56 (Conv2D) (None, 35, 35, 32) 10240 block35_6_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_56 (BatchNo (None, 35, 35, 32) 96 conv2d_56[0][0]
__________________________________________________________________________________________________
activation_640 (Activation) (None, 35, 35, 32) 0 batch_normalization_56[0][0]
__________________________________________________________________________________________________
conv2d_54 (Conv2D) (None, 35, 35, 32) 10240 block35_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_57 (Conv2D) (None, 35, 35, 48) 13824 activation_640[0][0]
__________________________________________________________________________________________________
batch_normalization_54 (BatchNo (None, 35, 35, 32) 96 conv2d_54[0][0]
__________________________________________________________________________________________________
batch_normalization_57 (BatchNo (None, 35, 35, 48) 144 conv2d_57[0][0]
__________________________________________________________________________________________________
activation_638 (Activation) (None, 35, 35, 32) 0 batch_normalization_54[0][0]
__________________________________________________________________________________________________
activation_641 (Activation) (None, 35, 35, 48) 0 batch_normalization_57[0][0]
__________________________________________________________________________________________________
conv2d_53 (Conv2D) (None, 35, 35, 32) 10240 block35_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_55 (Conv2D) (None, 35, 35, 32) 9216 activation_638[0][0]
__________________________________________________________________________________________________
conv2d_58 (Conv2D) (None, 35, 35, 64) 27648 activation_641[0][0]
__________________________________________________________________________________________________
batch_normalization_53 (BatchNo (None, 35, 35, 32) 96 conv2d_53[0][0]
__________________________________________________________________________________________________
batch_normalization_55 (BatchNo (None, 35, 35, 32) 96 conv2d_55[0][0]
__________________________________________________________________________________________________
batch_normalization_58 (BatchNo (None, 35, 35, 64) 192 conv2d_58[0][0]
__________________________________________________________________________________________________
activation_637 (Activation) (None, 35, 35, 32) 0 batch_normalization_53[0][0]
__________________________________________________________________________________________________
activation_639 (Activation) (None, 35, 35, 32) 0 batch_normalization_55[0][0]
__________________________________________________________________________________________________
activation_642 (Activation) (None, 35, 35, 64) 0 batch_normalization_58[0][0]
__________________________________________________________________________________________________
block35_7_mixed (Concatenate) (None, 35, 35, 128) 0 activation_637[0][0]
activation_639[0][0]
activation_642[0][0]
__________________________________________________________________________________________________
block35_7_conv (Conv2D) (None, 35, 35, 320) 41280 block35_7_mixed[0][0]
__________________________________________________________________________________________________
block35_7 (Lambda) (None, 35, 35, 320) 0 block35_6_ac[0][0]
block35_7_conv[0][0]
__________________________________________________________________________________________________
block35_7_ac (Activation) (None, 35, 35, 320) 0 block35_7[0][0]
__________________________________________________________________________________________________
conv2d_62 (Conv2D) (None, 35, 35, 32) 10240 block35_7_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_62 (BatchNo (None, 35, 35, 32) 96 conv2d_62[0][0]
__________________________________________________________________________________________________
activation_646 (Activation) (None, 35, 35, 32) 0 batch_normalization_62[0][0]
__________________________________________________________________________________________________
conv2d_60 (Conv2D) (None, 35, 35, 32) 10240 block35_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_63 (Conv2D) (None, 35, 35, 48) 13824 activation_646[0][0]
__________________________________________________________________________________________________
batch_normalization_60 (BatchNo (None, 35, 35, 32) 96 conv2d_60[0][0]
__________________________________________________________________________________________________
batch_normalization_63 (BatchNo (None, 35, 35, 48) 144 conv2d_63[0][0]
__________________________________________________________________________________________________
activation_644 (Activation) (None, 35, 35, 32) 0 batch_normalization_60[0][0]
__________________________________________________________________________________________________
activation_647 (Activation) (None, 35, 35, 48) 0 batch_normalization_63[0][0]
__________________________________________________________________________________________________
conv2d_59 (Conv2D) (None, 35, 35, 32) 10240 block35_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_61 (Conv2D) (None, 35, 35, 32) 9216 activation_644[0][0]
__________________________________________________________________________________________________
conv2d_64 (Conv2D) (None, 35, 35, 64) 27648 activation_647[0][0]
__________________________________________________________________________________________________
batch_normalization_59 (BatchNo (None, 35, 35, 32) 96 conv2d_59[0][0]
__________________________________________________________________________________________________
batch_normalization_61 (BatchNo (None, 35, 35, 32) 96 conv2d_61[0][0]
__________________________________________________________________________________________________
batch_normalization_64 (BatchNo (None, 35, 35, 64) 192 conv2d_64[0][0]
__________________________________________________________________________________________________
activation_643 (Activation) (None, 35, 35, 32) 0 batch_normalization_59[0][0]
__________________________________________________________________________________________________
activation_645 (Activation) (None, 35, 35, 32) 0 batch_normalization_61[0][0]
__________________________________________________________________________________________________
activation_648 (Activation) (None, 35, 35, 64) 0 batch_normalization_64[0][0]
__________________________________________________________________________________________________
block35_8_mixed (Concatenate) (None, 35, 35, 128) 0 activation_643[0][0]
activation_645[0][0]
activation_648[0][0]
__________________________________________________________________________________________________
block35_8_conv (Conv2D) (None, 35, 35, 320) 41280 block35_8_mixed[0][0]
__________________________________________________________________________________________________
block35_8 (Lambda) (None, 35, 35, 320) 0 block35_7_ac[0][0]
block35_8_conv[0][0]
__________________________________________________________________________________________________
block35_8_ac (Activation) (None, 35, 35, 320) 0 block35_8[0][0]
__________________________________________________________________________________________________
conv2d_68 (Conv2D) (None, 35, 35, 32) 10240 block35_8_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_68 (BatchNo (None, 35, 35, 32) 96 conv2d_68[0][0]
__________________________________________________________________________________________________
activation_652 (Activation) (None, 35, 35, 32) 0 batch_normalization_68[0][0]
__________________________________________________________________________________________________
conv2d_66 (Conv2D) (None, 35, 35, 32) 10240 block35_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_69 (Conv2D) (None, 35, 35, 48) 13824 activation_652[0][0]
__________________________________________________________________________________________________
batch_normalization_66 (BatchNo (None, 35, 35, 32) 96 conv2d_66[0][0]
__________________________________________________________________________________________________
batch_normalization_69 (BatchNo (None, 35, 35, 48) 144 conv2d_69[0][0]
__________________________________________________________________________________________________
activation_650 (Activation) (None, 35, 35, 32) 0 batch_normalization_66[0][0]
__________________________________________________________________________________________________
activation_653 (Activation) (None, 35, 35, 48) 0 batch_normalization_69[0][0]
__________________________________________________________________________________________________
conv2d_65 (Conv2D) (None, 35, 35, 32) 10240 block35_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_67 (Conv2D) (None, 35, 35, 32) 9216 activation_650[0][0]
__________________________________________________________________________________________________
conv2d_70 (Conv2D) (None, 35, 35, 64) 27648 activation_653[0][0]
__________________________________________________________________________________________________
batch_normalization_65 (BatchNo (None, 35, 35, 32) 96 conv2d_65[0][0]
__________________________________________________________________________________________________
batch_normalization_67 (BatchNo (None, 35, 35, 32) 96 conv2d_67[0][0]
__________________________________________________________________________________________________
batch_normalization_70 (BatchNo (None, 35, 35, 64) 192 conv2d_70[0][0]
__________________________________________________________________________________________________
activation_649 (Activation) (None, 35, 35, 32) 0 batch_normalization_65[0][0]
__________________________________________________________________________________________________
activation_651 (Activation) (None, 35, 35, 32) 0 batch_normalization_67[0][0]
__________________________________________________________________________________________________
activation_654 (Activation) (None, 35, 35, 64) 0 batch_normalization_70[0][0]
__________________________________________________________________________________________________
block35_9_mixed (Concatenate) (None, 35, 35, 128) 0 activation_649[0][0]
activation_651[0][0]
activation_654[0][0]
__________________________________________________________________________________________________
block35_9_conv (Conv2D) (None, 35, 35, 320) 41280 block35_9_mixed[0][0]
__________________________________________________________________________________________________
block35_9 (Lambda) (None, 35, 35, 320) 0 block35_8_ac[0][0]
block35_9_conv[0][0]
__________________________________________________________________________________________________
block35_9_ac (Activation) (None, 35, 35, 320) 0 block35_9[0][0]
__________________________________________________________________________________________________
conv2d_74 (Conv2D) (None, 35, 35, 32) 10240 block35_9_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_74 (BatchNo (None, 35, 35, 32) 96 conv2d_74[0][0]
__________________________________________________________________________________________________
activation_658 (Activation) (None, 35, 35, 32) 0 batch_normalization_74[0][0]
__________________________________________________________________________________________________
conv2d_72 (Conv2D) (None, 35, 35, 32) 10240 block35_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_75 (Conv2D) (None, 35, 35, 48) 13824 activation_658[0][0]
__________________________________________________________________________________________________
batch_normalization_72 (BatchNo (None, 35, 35, 32) 96 conv2d_72[0][0]
__________________________________________________________________________________________________
batch_normalization_75 (BatchNo (None, 35, 35, 48) 144 conv2d_75[0][0]
__________________________________________________________________________________________________
activation_656 (Activation) (None, 35, 35, 32) 0 batch_normalization_72[0][0]
__________________________________________________________________________________________________
activation_659 (Activation) (None, 35, 35, 48) 0 batch_normalization_75[0][0]
__________________________________________________________________________________________________
conv2d_71 (Conv2D) (None, 35, 35, 32) 10240 block35_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_73 (Conv2D) (None, 35, 35, 32) 9216 activation_656[0][0]
__________________________________________________________________________________________________
conv2d_76 (Conv2D) (None, 35, 35, 64) 27648 activation_659[0][0]
__________________________________________________________________________________________________
batch_normalization_71 (BatchNo (None, 35, 35, 32) 96 conv2d_71[0][0]
__________________________________________________________________________________________________
batch_normalization_73 (BatchNo (None, 35, 35, 32) 96 conv2d_73[0][0]
__________________________________________________________________________________________________
batch_normalization_76 (BatchNo (None, 35, 35, 64) 192 conv2d_76[0][0]
__________________________________________________________________________________________________
activation_655 (Activation) (None, 35, 35, 32) 0 batch_normalization_71[0][0]
__________________________________________________________________________________________________
activation_657 (Activation) (None, 35, 35, 32) 0 batch_normalization_73[0][0]
__________________________________________________________________________________________________
activation_660 (Activation) (None, 35, 35, 64) 0 batch_normalization_76[0][0]
__________________________________________________________________________________________________
block35_10_mixed (Concatenate) (None, 35, 35, 128) 0 activation_655[0][0]
activation_657[0][0]
activation_660[0][0]
__________________________________________________________________________________________________
block35_10_conv (Conv2D) (None, 35, 35, 320) 41280 block35_10_mixed[0][0]
__________________________________________________________________________________________________
block35_10 (Lambda) (None, 35, 35, 320) 0 block35_9_ac[0][0]
block35_10_conv[0][0]
__________________________________________________________________________________________________
block35_10_ac (Activation) (None, 35, 35, 320) 0 block35_10[0][0]
__________________________________________________________________________________________________
conv2d_78 (Conv2D) (None, 35, 35, 256) 81920 block35_10_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_78 (BatchNo (None, 35, 35, 256) 768 conv2d_78[0][0]
__________________________________________________________________________________________________
activation_662 (Activation) (None, 35, 35, 256) 0 batch_normalization_78[0][0]
__________________________________________________________________________________________________
conv2d_79 (Conv2D) (None, 35, 35, 256) 589824 activation_662[0][0]
__________________________________________________________________________________________________
batch_normalization_79 (BatchNo (None, 35, 35, 256) 768 conv2d_79[0][0]
__________________________________________________________________________________________________
activation_663 (Activation) (None, 35, 35, 256) 0 batch_normalization_79[0][0]
__________________________________________________________________________________________________
conv2d_77 (Conv2D) (None, 17, 17, 384) 1105920 block35_10_ac[0][0]
__________________________________________________________________________________________________
conv2d_80 (Conv2D) (None, 17, 17, 384) 884736 activation_663[0][0]
__________________________________________________________________________________________________
batch_normalization_77 (BatchNo (None, 17, 17, 384) 1152 conv2d_77[0][0]
__________________________________________________________________________________________________
batch_normalization_80 (BatchNo (None, 17, 17, 384) 1152 conv2d_80[0][0]
__________________________________________________________________________________________________
activation_661 (Activation) (None, 17, 17, 384) 0 batch_normalization_77[0][0]
__________________________________________________________________________________________________
activation_664 (Activation) (None, 17, 17, 384) 0 batch_normalization_80[0][0]
__________________________________________________________________________________________________
max_pooling2d_15 (MaxPooling2D) (None, 17, 17, 320) 0 block35_10_ac[0][0]
__________________________________________________________________________________________________
mixed_6a (Concatenate) (None, 17, 17, 1088) 0 activation_661[0][0]
activation_664[0][0]
max_pooling2d_15[0][0]
__________________________________________________________________________________________________
conv2d_82 (Conv2D) (None, 17, 17, 128) 139264 mixed_6a[0][0]
__________________________________________________________________________________________________
batch_normalization_82 (BatchNo (None, 17, 17, 128) 384 conv2d_82[0][0]
__________________________________________________________________________________________________
activation_666 (Activation) (None, 17, 17, 128) 0 batch_normalization_82[0][0]
__________________________________________________________________________________________________
conv2d_83 (Conv2D) (None, 17, 17, 160) 143360 activation_666[0][0]
__________________________________________________________________________________________________
batch_normalization_83 (BatchNo (None, 17, 17, 160) 480 conv2d_83[0][0]
__________________________________________________________________________________________________
activation_667 (Activation) (None, 17, 17, 160) 0 batch_normalization_83[0][0]
__________________________________________________________________________________________________
conv2d_81 (Conv2D) (None, 17, 17, 192) 208896 mixed_6a[0][0]
__________________________________________________________________________________________________
conv2d_84 (Conv2D) (None, 17, 17, 192) 215040 activation_667[0][0]
__________________________________________________________________________________________________
batch_normalization_81 (BatchNo (None, 17, 17, 192) 576 conv2d_81[0][0]
__________________________________________________________________________________________________
batch_normalization_84 (BatchNo (None, 17, 17, 192) 576 conv2d_84[0][0]
__________________________________________________________________________________________________
activation_665 (Activation) (None, 17, 17, 192) 0 batch_normalization_81[0][0]
__________________________________________________________________________________________________
activation_668 (Activation) (None, 17, 17, 192) 0 batch_normalization_84[0][0]
__________________________________________________________________________________________________
block17_1_mixed (Concatenate) (None, 17, 17, 384) 0 activation_665[0][0]
activation_668[0][0]
__________________________________________________________________________________________________
block17_1_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_1_mixed[0][0]
__________________________________________________________________________________________________
block17_1 (Lambda) (None, 17, 17, 1088) 0 mixed_6a[0][0]
block17_1_conv[0][0]
__________________________________________________________________________________________________
block17_1_ac (Activation) (None, 17, 17, 1088) 0 block17_1[0][0]
__________________________________________________________________________________________________
conv2d_86 (Conv2D) (None, 17, 17, 128) 139264 block17_1_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_86 (BatchNo (None, 17, 17, 128) 384 conv2d_86[0][0]
__________________________________________________________________________________________________
activation_670 (Activation) (None, 17, 17, 128) 0 batch_normalization_86[0][0]
__________________________________________________________________________________________________
conv2d_87 (Conv2D) (None, 17, 17, 160) 143360 activation_670[0][0]
__________________________________________________________________________________________________
batch_normalization_87 (BatchNo (None, 17, 17, 160) 480 conv2d_87[0][0]
__________________________________________________________________________________________________
activation_671 (Activation) (None, 17, 17, 160) 0 batch_normalization_87[0][0]
__________________________________________________________________________________________________
conv2d_85 (Conv2D) (None, 17, 17, 192) 208896 block17_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_88 (Conv2D) (None, 17, 17, 192) 215040 activation_671[0][0]
__________________________________________________________________________________________________
batch_normalization_85 (BatchNo (None, 17, 17, 192) 576 conv2d_85[0][0]
__________________________________________________________________________________________________
batch_normalization_88 (BatchNo (None, 17, 17, 192) 576 conv2d_88[0][0]
__________________________________________________________________________________________________
activation_669 (Activation) (None, 17, 17, 192) 0 batch_normalization_85[0][0]
__________________________________________________________________________________________________
activation_672 (Activation) (None, 17, 17, 192) 0 batch_normalization_88[0][0]
__________________________________________________________________________________________________
block17_2_mixed (Concatenate) (None, 17, 17, 384) 0 activation_669[0][0]
activation_672[0][0]
__________________________________________________________________________________________________
block17_2_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_2_mixed[0][0]
__________________________________________________________________________________________________
block17_2 (Lambda) (None, 17, 17, 1088) 0 block17_1_ac[0][0]
block17_2_conv[0][0]
__________________________________________________________________________________________________
block17_2_ac (Activation) (None, 17, 17, 1088) 0 block17_2[0][0]
__________________________________________________________________________________________________
conv2d_90 (Conv2D) (None, 17, 17, 128) 139264 block17_2_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_90 (BatchNo (None, 17, 17, 128) 384 conv2d_90[0][0]
__________________________________________________________________________________________________
activation_674 (Activation) (None, 17, 17, 128) 0 batch_normalization_90[0][0]
__________________________________________________________________________________________________
conv2d_91 (Conv2D) (None, 17, 17, 160) 143360 activation_674[0][0]
__________________________________________________________________________________________________
batch_normalization_91 (BatchNo (None, 17, 17, 160) 480 conv2d_91[0][0]
__________________________________________________________________________________________________
activation_675 (Activation) (None, 17, 17, 160) 0 batch_normalization_91[0][0]
__________________________________________________________________________________________________
conv2d_89 (Conv2D) (None, 17, 17, 192) 208896 block17_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_92 (Conv2D) (None, 17, 17, 192) 215040 activation_675[0][0]
__________________________________________________________________________________________________
batch_normalization_89 (BatchNo (None, 17, 17, 192) 576 conv2d_89[0][0]
__________________________________________________________________________________________________
batch_normalization_92 (BatchNo (None, 17, 17, 192) 576 conv2d_92[0][0]
__________________________________________________________________________________________________
activation_673 (Activation) (None, 17, 17, 192) 0 batch_normalization_89[0][0]
__________________________________________________________________________________________________
activation_676 (Activation) (None, 17, 17, 192) 0 batch_normalization_92[0][0]
__________________________________________________________________________________________________
block17_3_mixed (Concatenate) (None, 17, 17, 384) 0 activation_673[0][0]
activation_676[0][0]
__________________________________________________________________________________________________
block17_3_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_3_mixed[0][0]
__________________________________________________________________________________________________
block17_3 (Lambda) (None, 17, 17, 1088) 0 block17_2_ac[0][0]
block17_3_conv[0][0]
__________________________________________________________________________________________________
block17_3_ac (Activation) (None, 17, 17, 1088) 0 block17_3[0][0]
__________________________________________________________________________________________________
conv2d_94 (Conv2D) (None, 17, 17, 128) 139264 block17_3_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_94 (BatchNo (None, 17, 17, 128) 384 conv2d_94[0][0]
__________________________________________________________________________________________________
activation_678 (Activation) (None, 17, 17, 128) 0 batch_normalization_94[0][0]
__________________________________________________________________________________________________
conv2d_95 (Conv2D) (None, 17, 17, 160) 143360 activation_678[0][0]
__________________________________________________________________________________________________
batch_normalization_95 (BatchNo (None, 17, 17, 160) 480 conv2d_95[0][0]
__________________________________________________________________________________________________
activation_679 (Activation) (None, 17, 17, 160) 0 batch_normalization_95[0][0]
__________________________________________________________________________________________________
conv2d_93 (Conv2D) (None, 17, 17, 192) 208896 block17_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_96 (Conv2D) (None, 17, 17, 192) 215040 activation_679[0][0]
__________________________________________________________________________________________________
batch_normalization_93 (BatchNo (None, 17, 17, 192) 576 conv2d_93[0][0]
__________________________________________________________________________________________________
batch_normalization_96 (BatchNo (None, 17, 17, 192) 576 conv2d_96[0][0]
__________________________________________________________________________________________________
activation_677 (Activation) (None, 17, 17, 192) 0 batch_normalization_93[0][0]
__________________________________________________________________________________________________
activation_680 (Activation) (None, 17, 17, 192) 0 batch_normalization_96[0][0]
__________________________________________________________________________________________________
block17_4_mixed (Concatenate) (None, 17, 17, 384) 0 activation_677[0][0]
activation_680[0][0]
__________________________________________________________________________________________________
block17_4_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_4_mixed[0][0]
__________________________________________________________________________________________________
block17_4 (Lambda) (None, 17, 17, 1088) 0 block17_3_ac[0][0]
block17_4_conv[0][0]
__________________________________________________________________________________________________
block17_4_ac (Activation) (None, 17, 17, 1088) 0 block17_4[0][0]
__________________________________________________________________________________________________
conv2d_98 (Conv2D) (None, 17, 17, 128) 139264 block17_4_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_98 (BatchNo (None, 17, 17, 128) 384 conv2d_98[0][0]
__________________________________________________________________________________________________
activation_682 (Activation) (None, 17, 17, 128) 0 batch_normalization_98[0][0]
__________________________________________________________________________________________________
conv2d_99 (Conv2D) (None, 17, 17, 160) 143360 activation_682[0][0]
__________________________________________________________________________________________________
batch_normalization_99 (BatchNo (None, 17, 17, 160) 480 conv2d_99[0][0]
__________________________________________________________________________________________________
activation_683 (Activation) (None, 17, 17, 160) 0 batch_normalization_99[0][0]
__________________________________________________________________________________________________
conv2d_97 (Conv2D) (None, 17, 17, 192) 208896 block17_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_100 (Conv2D) (None, 17, 17, 192) 215040 activation_683[0][0]
__________________________________________________________________________________________________
batch_normalization_97 (BatchNo (None, 17, 17, 192) 576 conv2d_97[0][0]
__________________________________________________________________________________________________
batch_normalization_100 (BatchN (None, 17, 17, 192) 576 conv2d_100[0][0]
__________________________________________________________________________________________________
activation_681 (Activation) (None, 17, 17, 192) 0 batch_normalization_97[0][0]
__________________________________________________________________________________________________
activation_684 (Activation) (None, 17, 17, 192) 0 batch_normalization_100[0][0]
__________________________________________________________________________________________________
block17_5_mixed (Concatenate) (None, 17, 17, 384) 0 activation_681[0][0]
activation_684[0][0]
__________________________________________________________________________________________________
block17_5_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_5_mixed[0][0]
__________________________________________________________________________________________________
block17_5 (Lambda) (None, 17, 17, 1088) 0 block17_4_ac[0][0]
block17_5_conv[0][0]
__________________________________________________________________________________________________
block17_5_ac (Activation) (None, 17, 17, 1088) 0 block17_5[0][0]
__________________________________________________________________________________________________
conv2d_102 (Conv2D) (None, 17, 17, 128) 139264 block17_5_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_102 (BatchN (None, 17, 17, 128) 384 conv2d_102[0][0]
__________________________________________________________________________________________________
activation_686 (Activation) (None, 17, 17, 128) 0 batch_normalization_102[0][0]
__________________________________________________________________________________________________
conv2d_103 (Conv2D) (None, 17, 17, 160) 143360 activation_686[0][0]
__________________________________________________________________________________________________
batch_normalization_103 (BatchN (None, 17, 17, 160) 480 conv2d_103[0][0]
__________________________________________________________________________________________________
activation_687 (Activation) (None, 17, 17, 160) 0 batch_normalization_103[0][0]
__________________________________________________________________________________________________
conv2d_101 (Conv2D) (None, 17, 17, 192) 208896 block17_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_104 (Conv2D) (None, 17, 17, 192) 215040 activation_687[0][0]
__________________________________________________________________________________________________
batch_normalization_101 (BatchN (None, 17, 17, 192) 576 conv2d_101[0][0]
__________________________________________________________________________________________________
batch_normalization_104 (BatchN (None, 17, 17, 192) 576 conv2d_104[0][0]
__________________________________________________________________________________________________
activation_685 (Activation) (None, 17, 17, 192) 0 batch_normalization_101[0][0]
__________________________________________________________________________________________________
activation_688 (Activation) (None, 17, 17, 192) 0 batch_normalization_104[0][0]
__________________________________________________________________________________________________
block17_6_mixed (Concatenate) (None, 17, 17, 384) 0 activation_685[0][0]
activation_688[0][0]
__________________________________________________________________________________________________
block17_6_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_6_mixed[0][0]
__________________________________________________________________________________________________
block17_6 (Lambda) (None, 17, 17, 1088) 0 block17_5_ac[0][0]
block17_6_conv[0][0]
__________________________________________________________________________________________________
block17_6_ac (Activation) (None, 17, 17, 1088) 0 block17_6[0][0]
__________________________________________________________________________________________________
conv2d_106 (Conv2D) (None, 17, 17, 128) 139264 block17_6_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_106 (BatchN (None, 17, 17, 128) 384 conv2d_106[0][0]
__________________________________________________________________________________________________
activation_690 (Activation) (None, 17, 17, 128) 0 batch_normalization_106[0][0]
__________________________________________________________________________________________________
conv2d_107 (Conv2D) (None, 17, 17, 160) 143360 activation_690[0][0]
__________________________________________________________________________________________________
batch_normalization_107 (BatchN (None, 17, 17, 160) 480 conv2d_107[0][0]
__________________________________________________________________________________________________
activation_691 (Activation) (None, 17, 17, 160) 0 batch_normalization_107[0][0]
__________________________________________________________________________________________________
conv2d_105 (Conv2D) (None, 17, 17, 192) 208896 block17_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_108 (Conv2D) (None, 17, 17, 192) 215040 activation_691[0][0]
__________________________________________________________________________________________________
batch_normalization_105 (BatchN (None, 17, 17, 192) 576 conv2d_105[0][0]
__________________________________________________________________________________________________
batch_normalization_108 (BatchN (None, 17, 17, 192) 576 conv2d_108[0][0]
__________________________________________________________________________________________________
activation_689 (Activation) (None, 17, 17, 192) 0 batch_normalization_105[0][0]
__________________________________________________________________________________________________
activation_692 (Activation) (None, 17, 17, 192) 0 batch_normalization_108[0][0]
__________________________________________________________________________________________________
block17_7_mixed (Concatenate) (None, 17, 17, 384) 0 activation_689[0][0]
activation_692[0][0]
__________________________________________________________________________________________________
block17_7_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_7_mixed[0][0]
__________________________________________________________________________________________________
block17_7 (Lambda) (None, 17, 17, 1088) 0 block17_6_ac[0][0]
block17_7_conv[0][0]
__________________________________________________________________________________________________
block17_7_ac (Activation) (None, 17, 17, 1088) 0 block17_7[0][0]
__________________________________________________________________________________________________
conv2d_110 (Conv2D) (None, 17, 17, 128) 139264 block17_7_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_110 (BatchN (None, 17, 17, 128) 384 conv2d_110[0][0]
__________________________________________________________________________________________________
activation_694 (Activation) (None, 17, 17, 128) 0 batch_normalization_110[0][0]
__________________________________________________________________________________________________
conv2d_111 (Conv2D) (None, 17, 17, 160) 143360 activation_694[0][0]
__________________________________________________________________________________________________
batch_normalization_111 (BatchN (None, 17, 17, 160) 480 conv2d_111[0][0]
__________________________________________________________________________________________________
activation_695 (Activation) (None, 17, 17, 160) 0 batch_normalization_111[0][0]
__________________________________________________________________________________________________
conv2d_109 (Conv2D) (None, 17, 17, 192) 208896 block17_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_112 (Conv2D) (None, 17, 17, 192) 215040 activation_695[0][0]
__________________________________________________________________________________________________
batch_normalization_109 (BatchN (None, 17, 17, 192) 576 conv2d_109[0][0]
__________________________________________________________________________________________________
batch_normalization_112 (BatchN (None, 17, 17, 192) 576 conv2d_112[0][0]
__________________________________________________________________________________________________
activation_693 (Activation) (None, 17, 17, 192) 0 batch_normalization_109[0][0]
__________________________________________________________________________________________________
activation_696 (Activation) (None, 17, 17, 192) 0 batch_normalization_112[0][0]
__________________________________________________________________________________________________
block17_8_mixed (Concatenate) (None, 17, 17, 384) 0 activation_693[0][0]
activation_696[0][0]
__________________________________________________________________________________________________
block17_8_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_8_mixed[0][0]
__________________________________________________________________________________________________
block17_8 (Lambda) (None, 17, 17, 1088) 0 block17_7_ac[0][0]
block17_8_conv[0][0]
__________________________________________________________________________________________________
block17_8_ac (Activation) (None, 17, 17, 1088) 0 block17_8[0][0]
__________________________________________________________________________________________________
conv2d_114 (Conv2D) (None, 17, 17, 128) 139264 block17_8_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_114 (BatchN (None, 17, 17, 128) 384 conv2d_114[0][0]
__________________________________________________________________________________________________
activation_698 (Activation) (None, 17, 17, 128) 0 batch_normalization_114[0][0]
__________________________________________________________________________________________________
conv2d_115 (Conv2D) (None, 17, 17, 160) 143360 activation_698[0][0]
__________________________________________________________________________________________________
batch_normalization_115 (BatchN (None, 17, 17, 160) 480 conv2d_115[0][0]
__________________________________________________________________________________________________
activation_699 (Activation) (None, 17, 17, 160) 0 batch_normalization_115[0][0]
__________________________________________________________________________________________________
conv2d_113 (Conv2D) (None, 17, 17, 192) 208896 block17_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_116 (Conv2D) (None, 17, 17, 192) 215040 activation_699[0][0]
__________________________________________________________________________________________________
batch_normalization_113 (BatchN (None, 17, 17, 192) 576 conv2d_113[0][0]
__________________________________________________________________________________________________
batch_normalization_116 (BatchN (None, 17, 17, 192) 576 conv2d_116[0][0]
__________________________________________________________________________________________________
activation_697 (Activation) (None, 17, 17, 192) 0 batch_normalization_113[0][0]
__________________________________________________________________________________________________
activation_700 (Activation) (None, 17, 17, 192) 0 batch_normalization_116[0][0]
__________________________________________________________________________________________________
block17_9_mixed (Concatenate) (None, 17, 17, 384) 0 activation_697[0][0]
activation_700[0][0]
__________________________________________________________________________________________________
block17_9_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_9_mixed[0][0]
__________________________________________________________________________________________________
block17_9 (Lambda) (None, 17, 17, 1088) 0 block17_8_ac[0][0]
block17_9_conv[0][0]
__________________________________________________________________________________________________
block17_9_ac (Activation) (None, 17, 17, 1088) 0 block17_9[0][0]
__________________________________________________________________________________________________
conv2d_118 (Conv2D) (None, 17, 17, 128) 139264 block17_9_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_118 (BatchN (None, 17, 17, 128) 384 conv2d_118[0][0]
__________________________________________________________________________________________________
activation_702 (Activation) (None, 17, 17, 128) 0 batch_normalization_118[0][0]
__________________________________________________________________________________________________
conv2d_119 (Conv2D) (None, 17, 17, 160) 143360 activation_702[0][0]
__________________________________________________________________________________________________
batch_normalization_119 (BatchN (None, 17, 17, 160) 480 conv2d_119[0][0]
__________________________________________________________________________________________________
activation_703 (Activation) (None, 17, 17, 160) 0 batch_normalization_119[0][0]
__________________________________________________________________________________________________
conv2d_117 (Conv2D) (None, 17, 17, 192) 208896 block17_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_120 (Conv2D) (None, 17, 17, 192) 215040 activation_703[0][0]
__________________________________________________________________________________________________
batch_normalization_117 (BatchN (None, 17, 17, 192) 576 conv2d_117[0][0]
__________________________________________________________________________________________________
batch_normalization_120 (BatchN (None, 17, 17, 192) 576 conv2d_120[0][0]
__________________________________________________________________________________________________
activation_701 (Activation) (None, 17, 17, 192) 0 batch_normalization_117[0][0]
__________________________________________________________________________________________________
activation_704 (Activation) (None, 17, 17, 192) 0 batch_normalization_120[0][0]
__________________________________________________________________________________________________
block17_10_mixed (Concatenate) (None, 17, 17, 384) 0 activation_701[0][0]
activation_704[0][0]
__________________________________________________________________________________________________
block17_10_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_10_mixed[0][0]
__________________________________________________________________________________________________
block17_10 (Lambda) (None, 17, 17, 1088) 0 block17_9_ac[0][0]
block17_10_conv[0][0]
__________________________________________________________________________________________________
block17_10_ac (Activation) (None, 17, 17, 1088) 0 block17_10[0][0]
__________________________________________________________________________________________________
conv2d_122 (Conv2D) (None, 17, 17, 128) 139264 block17_10_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_122 (BatchN (None, 17, 17, 128) 384 conv2d_122[0][0]
__________________________________________________________________________________________________
activation_706 (Activation) (None, 17, 17, 128) 0 batch_normalization_122[0][0]
__________________________________________________________________________________________________
conv2d_123 (Conv2D) (None, 17, 17, 160) 143360 activation_706[0][0]
__________________________________________________________________________________________________
batch_normalization_123 (BatchN (None, 17, 17, 160) 480 conv2d_123[0][0]
__________________________________________________________________________________________________
activation_707 (Activation) (None, 17, 17, 160) 0 batch_normalization_123[0][0]
__________________________________________________________________________________________________
conv2d_121 (Conv2D) (None, 17, 17, 192) 208896 block17_10_ac[0][0]
__________________________________________________________________________________________________
conv2d_124 (Conv2D) (None, 17, 17, 192) 215040 activation_707[0][0]
__________________________________________________________________________________________________
batch_normalization_121 (BatchN (None, 17, 17, 192) 576 conv2d_121[0][0]
__________________________________________________________________________________________________
batch_normalization_124 (BatchN (None, 17, 17, 192) 576 conv2d_124[0][0]
__________________________________________________________________________________________________
activation_705 (Activation) (None, 17, 17, 192) 0 batch_normalization_121[0][0]
__________________________________________________________________________________________________
activation_708 (Activation) (None, 17, 17, 192) 0 batch_normalization_124[0][0]
__________________________________________________________________________________________________
block17_11_mixed (Concatenate) (None, 17, 17, 384) 0 activation_705[0][0]
activation_708[0][0]
__________________________________________________________________________________________________
block17_11_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_11_mixed[0][0]
__________________________________________________________________________________________________
block17_11 (Lambda) (None, 17, 17, 1088) 0 block17_10_ac[0][0]
block17_11_conv[0][0]
__________________________________________________________________________________________________
block17_11_ac (Activation) (None, 17, 17, 1088) 0 block17_11[0][0]
__________________________________________________________________________________________________
conv2d_126 (Conv2D) (None, 17, 17, 128) 139264 block17_11_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_126 (BatchN (None, 17, 17, 128) 384 conv2d_126[0][0]
__________________________________________________________________________________________________
activation_710 (Activation) (None, 17, 17, 128) 0 batch_normalization_126[0][0]
__________________________________________________________________________________________________
conv2d_127 (Conv2D) (None, 17, 17, 160) 143360 activation_710[0][0]
__________________________________________________________________________________________________
batch_normalization_127 (BatchN (None, 17, 17, 160) 480 conv2d_127[0][0]
__________________________________________________________________________________________________
activation_711 (Activation) (None, 17, 17, 160) 0 batch_normalization_127[0][0]
__________________________________________________________________________________________________
conv2d_125 (Conv2D) (None, 17, 17, 192) 208896 block17_11_ac[0][0]
__________________________________________________________________________________________________
conv2d_128 (Conv2D) (None, 17, 17, 192) 215040 activation_711[0][0]
__________________________________________________________________________________________________
batch_normalization_125 (BatchN (None, 17, 17, 192) 576 conv2d_125[0][0]
__________________________________________________________________________________________________
batch_normalization_128 (BatchN (None, 17, 17, 192) 576 conv2d_128[0][0]
__________________________________________________________________________________________________
activation_709 (Activation) (None, 17, 17, 192) 0 batch_normalization_125[0][0]
__________________________________________________________________________________________________
activation_712 (Activation) (None, 17, 17, 192) 0 batch_normalization_128[0][0]
__________________________________________________________________________________________________
block17_12_mixed (Concatenate) (None, 17, 17, 384) 0 activation_709[0][0]
activation_712[0][0]
__________________________________________________________________________________________________
block17_12_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_12_mixed[0][0]
__________________________________________________________________________________________________
block17_12 (Lambda) (None, 17, 17, 1088) 0 block17_11_ac[0][0]
block17_12_conv[0][0]
__________________________________________________________________________________________________
block17_12_ac (Activation) (None, 17, 17, 1088) 0 block17_12[0][0]
__________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 17, 17, 128) 139264 block17_12_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 17, 17, 128) 384 conv2d_130[0][0]
__________________________________________________________________________________________________
activation_714 (Activation) (None, 17, 17, 128) 0 batch_normalization_130[0][0]
__________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 17, 17, 160) 143360 activation_714[0][0]
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 17, 17, 160) 480 conv2d_131[0][0]
__________________________________________________________________________________________________
activation_715 (Activation) (None, 17, 17, 160) 0 batch_normalization_131[0][0]
__________________________________________________________________________________________________
conv2d_129 (Conv2D) (None, 17, 17, 192) 208896 block17_12_ac[0][0]
__________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 17, 17, 192) 215040 activation_715[0][0]
__________________________________________________________________________________________________
batch_normalization_129 (BatchN (None, 17, 17, 192) 576 conv2d_129[0][0]
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 17, 17, 192) 576 conv2d_132[0][0]
__________________________________________________________________________________________________
activation_713 (Activation) (None, 17, 17, 192) 0 batch_normalization_129[0][0]
__________________________________________________________________________________________________
activation_716 (Activation) (None, 17, 17, 192) 0 batch_normalization_132[0][0]
__________________________________________________________________________________________________
block17_13_mixed (Concatenate) (None, 17, 17, 384) 0 activation_713[0][0]
activation_716[0][0]
__________________________________________________________________________________________________
block17_13_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_13_mixed[0][0]
__________________________________________________________________________________________________
block17_13 (Lambda) (None, 17, 17, 1088) 0 block17_12_ac[0][0]
block17_13_conv[0][0]
__________________________________________________________________________________________________
block17_13_ac (Activation) (None, 17, 17, 1088) 0 block17_13[0][0]
__________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 17, 17, 128) 139264 block17_13_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 17, 17, 128) 384 conv2d_134[0][0]
__________________________________________________________________________________________________
activation_718 (Activation) (None, 17, 17, 128) 0 batch_normalization_134[0][0]
__________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 17, 17, 160) 143360 activation_718[0][0]
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 17, 17, 160) 480 conv2d_135[0][0]
__________________________________________________________________________________________________
activation_719 (Activation) (None, 17, 17, 160) 0 batch_normalization_135[0][0]
__________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 17, 17, 192) 208896 block17_13_ac[0][0]
__________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 17, 17, 192) 215040 activation_719[0][0]
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 17, 17, 192) 576 conv2d_133[0][0]
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 17, 17, 192) 576 conv2d_136[0][0]
__________________________________________________________________________________________________
activation_717 (Activation) (None, 17, 17, 192) 0 batch_normalization_133[0][0]
__________________________________________________________________________________________________
activation_720 (Activation) (None, 17, 17, 192) 0 batch_normalization_136[0][0]
__________________________________________________________________________________________________
block17_14_mixed (Concatenate) (None, 17, 17, 384) 0 activation_717[0][0]
activation_720[0][0]
__________________________________________________________________________________________________
block17_14_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_14_mixed[0][0]
__________________________________________________________________________________________________
block17_14 (Lambda) (None, 17, 17, 1088) 0 block17_13_ac[0][0]
block17_14_conv[0][0]
__________________________________________________________________________________________________
block17_14_ac (Activation) (None, 17, 17, 1088) 0 block17_14[0][0]
__________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 17, 17, 128) 139264 block17_14_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 17, 17, 128) 384 conv2d_138[0][0]
__________________________________________________________________________________________________
activation_722 (Activation) (None, 17, 17, 128) 0 batch_normalization_138[0][0]
__________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 17, 17, 160) 143360 activation_722[0][0]
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 17, 17, 160) 480 conv2d_139[0][0]
__________________________________________________________________________________________________
activation_723 (Activation) (None, 17, 17, 160) 0 batch_normalization_139[0][0]
__________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 17, 17, 192) 208896 block17_14_ac[0][0]
__________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 17, 17, 192) 215040 activation_723[0][0]
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 17, 17, 192) 576 conv2d_137[0][0]
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 17, 17, 192) 576 conv2d_140[0][0]
__________________________________________________________________________________________________
activation_721 (Activation) (None, 17, 17, 192) 0 batch_normalization_137[0][0]
__________________________________________________________________________________________________
activation_724 (Activation) (None, 17, 17, 192) 0 batch_normalization_140[0][0]
__________________________________________________________________________________________________
block17_15_mixed (Concatenate) (None, 17, 17, 384) 0 activation_721[0][0]
activation_724[0][0]
__________________________________________________________________________________________________
block17_15_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_15_mixed[0][0]
__________________________________________________________________________________________________
block17_15 (Lambda) (None, 17, 17, 1088) 0 block17_14_ac[0][0]
block17_15_conv[0][0]
__________________________________________________________________________________________________
block17_15_ac (Activation) (None, 17, 17, 1088) 0 block17_15[0][0]
__________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 17, 17, 128) 139264 block17_15_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 17, 17, 128) 384 conv2d_142[0][0]
__________________________________________________________________________________________________
activation_726 (Activation) (None, 17, 17, 128) 0 batch_normalization_142[0][0]
__________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 17, 17, 160) 143360 activation_726[0][0]
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 17, 17, 160) 480 conv2d_143[0][0]
__________________________________________________________________________________________________
activation_727 (Activation) (None, 17, 17, 160) 0 batch_normalization_143[0][0]
__________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 17, 17, 192) 208896 block17_15_ac[0][0]
__________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 17, 17, 192) 215040 activation_727[0][0]
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 17, 17, 192) 576 conv2d_141[0][0]
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 17, 17, 192) 576 conv2d_144[0][0]
__________________________________________________________________________________________________
activation_725 (Activation) (None, 17, 17, 192) 0 batch_normalization_141[0][0]
__________________________________________________________________________________________________
activation_728 (Activation) (None, 17, 17, 192) 0 batch_normalization_144[0][0]
__________________________________________________________________________________________________
block17_16_mixed (Concatenate) (None, 17, 17, 384) 0 activation_725[0][0]
activation_728[0][0]
__________________________________________________________________________________________________
block17_16_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_16_mixed[0][0]
__________________________________________________________________________________________________
block17_16 (Lambda) (None, 17, 17, 1088) 0 block17_15_ac[0][0]
block17_16_conv[0][0]
__________________________________________________________________________________________________
block17_16_ac (Activation) (None, 17, 17, 1088) 0 block17_16[0][0]
__________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 17, 17, 128) 139264 block17_16_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 17, 17, 128) 384 conv2d_146[0][0]
__________________________________________________________________________________________________
activation_730 (Activation) (None, 17, 17, 128) 0 batch_normalization_146[0][0]
__________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 17, 17, 160) 143360 activation_730[0][0]
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 17, 17, 160) 480 conv2d_147[0][0]
__________________________________________________________________________________________________
activation_731 (Activation) (None, 17, 17, 160) 0 batch_normalization_147[0][0]
__________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 17, 17, 192) 208896 block17_16_ac[0][0]
__________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 17, 17, 192) 215040 activation_731[0][0]
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 17, 17, 192) 576 conv2d_145[0][0]
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 17, 17, 192) 576 conv2d_148[0][0]
__________________________________________________________________________________________________
activation_729 (Activation) (None, 17, 17, 192) 0 batch_normalization_145[0][0]
__________________________________________________________________________________________________
activation_732 (Activation) (None, 17, 17, 192) 0 batch_normalization_148[0][0]
__________________________________________________________________________________________________
block17_17_mixed (Concatenate) (None, 17, 17, 384) 0 activation_729[0][0]
activation_732[0][0]
__________________________________________________________________________________________________
block17_17_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_17_mixed[0][0]
__________________________________________________________________________________________________
block17_17 (Lambda) (None, 17, 17, 1088) 0 block17_16_ac[0][0]
block17_17_conv[0][0]
__________________________________________________________________________________________________
block17_17_ac (Activation) (None, 17, 17, 1088) 0 block17_17[0][0]
__________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 17, 17, 128) 139264 block17_17_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 17, 17, 128) 384 conv2d_150[0][0]
__________________________________________________________________________________________________
activation_734 (Activation) (None, 17, 17, 128) 0 batch_normalization_150[0][0]
__________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 17, 17, 160) 143360 activation_734[0][0]
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 17, 17, 160) 480 conv2d_151[0][0]
__________________________________________________________________________________________________
activation_735 (Activation) (None, 17, 17, 160) 0 batch_normalization_151[0][0]
__________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 17, 17, 192) 208896 block17_17_ac[0][0]
__________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 17, 17, 192) 215040 activation_735[0][0]
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 17, 17, 192) 576 conv2d_149[0][0]
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 17, 17, 192) 576 conv2d_152[0][0]
__________________________________________________________________________________________________
activation_733 (Activation) (None, 17, 17, 192) 0 batch_normalization_149[0][0]
__________________________________________________________________________________________________
activation_736 (Activation) (None, 17, 17, 192) 0 batch_normalization_152[0][0]
__________________________________________________________________________________________________
block17_18_mixed (Concatenate) (None, 17, 17, 384) 0 activation_733[0][0]
activation_736[0][0]
__________________________________________________________________________________________________
block17_18_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_18_mixed[0][0]
__________________________________________________________________________________________________
block17_18 (Lambda) (None, 17, 17, 1088) 0 block17_17_ac[0][0]
block17_18_conv[0][0]
__________________________________________________________________________________________________
block17_18_ac (Activation) (None, 17, 17, 1088) 0 block17_18[0][0]
__________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 17, 17, 128) 139264 block17_18_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 17, 17, 128) 384 conv2d_154[0][0]
__________________________________________________________________________________________________
activation_738 (Activation) (None, 17, 17, 128) 0 batch_normalization_154[0][0]
__________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 17, 17, 160) 143360 activation_738[0][0]
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 17, 17, 160) 480 conv2d_155[0][0]
__________________________________________________________________________________________________
activation_739 (Activation) (None, 17, 17, 160) 0 batch_normalization_155[0][0]
__________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 17, 17, 192) 208896 block17_18_ac[0][0]
__________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 17, 17, 192) 215040 activation_739[0][0]
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 17, 17, 192) 576 conv2d_153[0][0]
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 17, 17, 192) 576 conv2d_156[0][0]
__________________________________________________________________________________________________
activation_737 (Activation) (None, 17, 17, 192) 0 batch_normalization_153[0][0]
__________________________________________________________________________________________________
activation_740 (Activation) (None, 17, 17, 192) 0 batch_normalization_156[0][0]
__________________________________________________________________________________________________
block17_19_mixed (Concatenate) (None, 17, 17, 384) 0 activation_737[0][0]
activation_740[0][0]
__________________________________________________________________________________________________
block17_19_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_19_mixed[0][0]
__________________________________________________________________________________________________
block17_19 (Lambda) (None, 17, 17, 1088) 0 block17_18_ac[0][0]
block17_19_conv[0][0]
__________________________________________________________________________________________________
block17_19_ac (Activation) (None, 17, 17, 1088) 0 block17_19[0][0]
__________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 17, 17, 128) 139264 block17_19_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 17, 17, 128) 384 conv2d_158[0][0]
__________________________________________________________________________________________________
activation_742 (Activation) (None, 17, 17, 128) 0 batch_normalization_158[0][0]
__________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 17, 17, 160) 143360 activation_742[0][0]
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 17, 17, 160) 480 conv2d_159[0][0]
__________________________________________________________________________________________________
activation_743 (Activation) (None, 17, 17, 160) 0 batch_normalization_159[0][0]
__________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 17, 17, 192) 208896 block17_19_ac[0][0]
__________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 17, 17, 192) 215040 activation_743[0][0]
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 17, 17, 192) 576 conv2d_157[0][0]
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 17, 17, 192) 576 conv2d_160[0][0]
__________________________________________________________________________________________________
activation_741 (Activation) (None, 17, 17, 192) 0 batch_normalization_157[0][0]
__________________________________________________________________________________________________
activation_744 (Activation) (None, 17, 17, 192) 0 batch_normalization_160[0][0]
__________________________________________________________________________________________________
block17_20_mixed (Concatenate) (None, 17, 17, 384) 0 activation_741[0][0]
activation_744[0][0]
__________________________________________________________________________________________________
block17_20_conv (Conv2D) (None, 17, 17, 1088) 418880 block17_20_mixed[0][0]
__________________________________________________________________________________________________
block17_20 (Lambda) (None, 17, 17, 1088) 0 block17_19_ac[0][0]
block17_20_conv[0][0]
__________________________________________________________________________________________________
block17_20_ac (Activation) (None, 17, 17, 1088) 0 block17_20[0][0]
__________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 17, 17, 256) 278528 block17_20_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 17, 17, 256) 768 conv2d_165[0][0]
__________________________________________________________________________________________________
activation_749 (Activation) (None, 17, 17, 256) 0 batch_normalization_165[0][0]
__________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 17, 17, 256) 278528 block17_20_ac[0][0]
__________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 17, 17, 256) 278528 block17_20_ac[0][0]
__________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 17, 17, 288) 663552 activation_749[0][0]
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 17, 17, 256) 768 conv2d_161[0][0]
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 17, 17, 256) 768 conv2d_163[0][0]
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 17, 17, 288) 864 conv2d_166[0][0]
__________________________________________________________________________________________________
activation_745 (Activation) (None, 17, 17, 256) 0 batch_normalization_161[0][0]
__________________________________________________________________________________________________
activation_747 (Activation) (None, 17, 17, 256) 0 batch_normalization_163[0][0]
__________________________________________________________________________________________________
activation_750 (Activation) (None, 17, 17, 288) 0 batch_normalization_166[0][0]
__________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 8, 8, 384) 884736 activation_745[0][0]
__________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 8, 8, 288) 663552 activation_747[0][0]
__________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 8, 8, 320) 829440 activation_750[0][0]
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 8, 8, 384) 1152 conv2d_162[0][0]
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 8, 8, 288) 864 conv2d_164[0][0]
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 8, 8, 320) 960 conv2d_167[0][0]
__________________________________________________________________________________________________
activation_746 (Activation) (None, 8, 8, 384) 0 batch_normalization_162[0][0]
__________________________________________________________________________________________________
activation_748 (Activation) (None, 8, 8, 288) 0 batch_normalization_164[0][0]
__________________________________________________________________________________________________
activation_751 (Activation) (None, 8, 8, 320) 0 batch_normalization_167[0][0]
__________________________________________________________________________________________________
max_pooling2d_16 (MaxPooling2D) (None, 8, 8, 1088) 0 block17_20_ac[0][0]
__________________________________________________________________________________________________
mixed_7a (Concatenate) (None, 8, 8, 2080) 0 activation_746[0][0]
activation_748[0][0]
activation_751[0][0]
max_pooling2d_16[0][0]
__________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 8, 8, 192) 399360 mixed_7a[0][0]
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 8, 8, 192) 576 conv2d_169[0][0]
__________________________________________________________________________________________________
activation_753 (Activation) (None, 8, 8, 192) 0 batch_normalization_169[0][0]
__________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 8, 8, 224) 129024 activation_753[0][0]
__________________________________________________________________________________________________
batch_normalization_170 (BatchN (None, 8, 8, 224) 672 conv2d_170[0][0]
__________________________________________________________________________________________________
activation_754 (Activation) (None, 8, 8, 224) 0 batch_normalization_170[0][0]
__________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 8, 8, 192) 399360 mixed_7a[0][0]
__________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 8, 8, 256) 172032 activation_754[0][0]
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 8, 8, 192) 576 conv2d_168[0][0]
__________________________________________________________________________________________________
batch_normalization_171 (BatchN (None, 8, 8, 256) 768 conv2d_171[0][0]
__________________________________________________________________________________________________
activation_752 (Activation) (None, 8, 8, 192) 0 batch_normalization_168[0][0]
__________________________________________________________________________________________________
activation_755 (Activation) (None, 8, 8, 256) 0 batch_normalization_171[0][0]
__________________________________________________________________________________________________
block8_1_mixed (Concatenate) (None, 8, 8, 448) 0 activation_752[0][0]
activation_755[0][0]
__________________________________________________________________________________________________
block8_1_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_1_mixed[0][0]
__________________________________________________________________________________________________
block8_1 (Lambda) (None, 8, 8, 2080) 0 mixed_7a[0][0]
block8_1_conv[0][0]
__________________________________________________________________________________________________
block8_1_ac (Activation) (None, 8, 8, 2080) 0 block8_1[0][0]
__________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 8, 8, 192) 399360 block8_1_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_173 (BatchN (None, 8, 8, 192) 576 conv2d_173[0][0]
__________________________________________________________________________________________________
activation_757 (Activation) (None, 8, 8, 192) 0 batch_normalization_173[0][0]
__________________________________________________________________________________________________
conv2d_174 (Conv2D) (None, 8, 8, 224) 129024 activation_757[0][0]
__________________________________________________________________________________________________
batch_normalization_174 (BatchN (None, 8, 8, 224) 672 conv2d_174[0][0]
__________________________________________________________________________________________________
activation_758 (Activation) (None, 8, 8, 224) 0 batch_normalization_174[0][0]
__________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 8, 8, 192) 399360 block8_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_175 (Conv2D) (None, 8, 8, 256) 172032 activation_758[0][0]
__________________________________________________________________________________________________
batch_normalization_172 (BatchN (None, 8, 8, 192) 576 conv2d_172[0][0]
__________________________________________________________________________________________________
batch_normalization_175 (BatchN (None, 8, 8, 256) 768 conv2d_175[0][0]
__________________________________________________________________________________________________
activation_756 (Activation) (None, 8, 8, 192) 0 batch_normalization_172[0][0]
__________________________________________________________________________________________________
activation_759 (Activation) (None, 8, 8, 256) 0 batch_normalization_175[0][0]
__________________________________________________________________________________________________
block8_2_mixed (Concatenate) (None, 8, 8, 448) 0 activation_756[0][0]
activation_759[0][0]
__________________________________________________________________________________________________
block8_2_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_2_mixed[0][0]
__________________________________________________________________________________________________
block8_2 (Lambda) (None, 8, 8, 2080) 0 block8_1_ac[0][0]
block8_2_conv[0][0]
__________________________________________________________________________________________________
block8_2_ac (Activation) (None, 8, 8, 2080) 0 block8_2[0][0]
__________________________________________________________________________________________________
conv2d_177 (Conv2D) (None, 8, 8, 192) 399360 block8_2_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_177 (BatchN (None, 8, 8, 192) 576 conv2d_177[0][0]
__________________________________________________________________________________________________
activation_761 (Activation) (None, 8, 8, 192) 0 batch_normalization_177[0][0]
__________________________________________________________________________________________________
conv2d_178 (Conv2D) (None, 8, 8, 224) 129024 activation_761[0][0]
__________________________________________________________________________________________________
batch_normalization_178 (BatchN (None, 8, 8, 224) 672 conv2d_178[0][0]
__________________________________________________________________________________________________
activation_762 (Activation) (None, 8, 8, 224) 0 batch_normalization_178[0][0]
__________________________________________________________________________________________________
conv2d_176 (Conv2D) (None, 8, 8, 192) 399360 block8_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_179 (Conv2D) (None, 8, 8, 256) 172032 activation_762[0][0]
__________________________________________________________________________________________________
batch_normalization_176 (BatchN (None, 8, 8, 192) 576 conv2d_176[0][0]
__________________________________________________________________________________________________
batch_normalization_179 (BatchN (None, 8, 8, 256) 768 conv2d_179[0][0]
__________________________________________________________________________________________________
activation_760 (Activation) (None, 8, 8, 192) 0 batch_normalization_176[0][0]
__________________________________________________________________________________________________
activation_763 (Activation) (None, 8, 8, 256) 0 batch_normalization_179[0][0]
__________________________________________________________________________________________________
block8_3_mixed (Concatenate) (None, 8, 8, 448) 0 activation_760[0][0]
activation_763[0][0]
__________________________________________________________________________________________________
block8_3_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_3_mixed[0][0]
__________________________________________________________________________________________________
block8_3 (Lambda) (None, 8, 8, 2080) 0 block8_2_ac[0][0]
block8_3_conv[0][0]
__________________________________________________________________________________________________
block8_3_ac (Activation) (None, 8, 8, 2080) 0 block8_3[0][0]
__________________________________________________________________________________________________
conv2d_181 (Conv2D) (None, 8, 8, 192) 399360 block8_3_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_181 (BatchN (None, 8, 8, 192) 576 conv2d_181[0][0]
__________________________________________________________________________________________________
activation_765 (Activation) (None, 8, 8, 192) 0 batch_normalization_181[0][0]
__________________________________________________________________________________________________
conv2d_182 (Conv2D) (None, 8, 8, 224) 129024 activation_765[0][0]
__________________________________________________________________________________________________
batch_normalization_182 (BatchN (None, 8, 8, 224) 672 conv2d_182[0][0]
__________________________________________________________________________________________________
activation_766 (Activation) (None, 8, 8, 224) 0 batch_normalization_182[0][0]
__________________________________________________________________________________________________
conv2d_180 (Conv2D) (None, 8, 8, 192) 399360 block8_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_183 (Conv2D) (None, 8, 8, 256) 172032 activation_766[0][0]
__________________________________________________________________________________________________
batch_normalization_180 (BatchN (None, 8, 8, 192) 576 conv2d_180[0][0]
__________________________________________________________________________________________________
batch_normalization_183 (BatchN (None, 8, 8, 256) 768 conv2d_183[0][0]
__________________________________________________________________________________________________
activation_764 (Activation) (None, 8, 8, 192) 0 batch_normalization_180[0][0]
__________________________________________________________________________________________________
activation_767 (Activation) (None, 8, 8, 256) 0 batch_normalization_183[0][0]
__________________________________________________________________________________________________
block8_4_mixed (Concatenate) (None, 8, 8, 448) 0 activation_764[0][0]
activation_767[0][0]
__________________________________________________________________________________________________
block8_4_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_4_mixed[0][0]
__________________________________________________________________________________________________
block8_4 (Lambda) (None, 8, 8, 2080) 0 block8_3_ac[0][0]
block8_4_conv[0][0]
__________________________________________________________________________________________________
block8_4_ac (Activation) (None, 8, 8, 2080) 0 block8_4[0][0]
__________________________________________________________________________________________________
conv2d_185 (Conv2D) (None, 8, 8, 192) 399360 block8_4_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_185 (BatchN (None, 8, 8, 192) 576 conv2d_185[0][0]
__________________________________________________________________________________________________
activation_769 (Activation) (None, 8, 8, 192) 0 batch_normalization_185[0][0]
__________________________________________________________________________________________________
conv2d_186 (Conv2D) (None, 8, 8, 224) 129024 activation_769[0][0]
__________________________________________________________________________________________________
batch_normalization_186 (BatchN (None, 8, 8, 224) 672 conv2d_186[0][0]
__________________________________________________________________________________________________
activation_770 (Activation) (None, 8, 8, 224) 0 batch_normalization_186[0][0]
__________________________________________________________________________________________________
conv2d_184 (Conv2D) (None, 8, 8, 192) 399360 block8_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_187 (Conv2D) (None, 8, 8, 256) 172032 activation_770[0][0]
__________________________________________________________________________________________________
batch_normalization_184 (BatchN (None, 8, 8, 192) 576 conv2d_184[0][0]
__________________________________________________________________________________________________
batch_normalization_187 (BatchN (None, 8, 8, 256) 768 conv2d_187[0][0]
__________________________________________________________________________________________________
activation_768 (Activation) (None, 8, 8, 192) 0 batch_normalization_184[0][0]
__________________________________________________________________________________________________
activation_771 (Activation) (None, 8, 8, 256) 0 batch_normalization_187[0][0]
__________________________________________________________________________________________________
block8_5_mixed (Concatenate) (None, 8, 8, 448) 0 activation_768[0][0]
activation_771[0][0]
__________________________________________________________________________________________________
block8_5_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_5_mixed[0][0]
__________________________________________________________________________________________________
block8_5 (Lambda) (None, 8, 8, 2080) 0 block8_4_ac[0][0]
block8_5_conv[0][0]
__________________________________________________________________________________________________
block8_5_ac (Activation) (None, 8, 8, 2080) 0 block8_5[0][0]
__________________________________________________________________________________________________
conv2d_189 (Conv2D) (None, 8, 8, 192) 399360 block8_5_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_189 (BatchN (None, 8, 8, 192) 576 conv2d_189[0][0]
__________________________________________________________________________________________________
activation_773 (Activation) (None, 8, 8, 192) 0 batch_normalization_189[0][0]
__________________________________________________________________________________________________
conv2d_190 (Conv2D) (None, 8, 8, 224) 129024 activation_773[0][0]
__________________________________________________________________________________________________
batch_normalization_190 (BatchN (None, 8, 8, 224) 672 conv2d_190[0][0]
__________________________________________________________________________________________________
activation_774 (Activation) (None, 8, 8, 224) 0 batch_normalization_190[0][0]
__________________________________________________________________________________________________
conv2d_188 (Conv2D) (None, 8, 8, 192) 399360 block8_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_191 (Conv2D) (None, 8, 8, 256) 172032 activation_774[0][0]
__________________________________________________________________________________________________
batch_normalization_188 (BatchN (None, 8, 8, 192) 576 conv2d_188[0][0]
__________________________________________________________________________________________________
batch_normalization_191 (BatchN (None, 8, 8, 256) 768 conv2d_191[0][0]
__________________________________________________________________________________________________
activation_772 (Activation) (None, 8, 8, 192) 0 batch_normalization_188[0][0]
__________________________________________________________________________________________________
activation_775 (Activation) (None, 8, 8, 256) 0 batch_normalization_191[0][0]
__________________________________________________________________________________________________
block8_6_mixed (Concatenate) (None, 8, 8, 448) 0 activation_772[0][0]
activation_775[0][0]
__________________________________________________________________________________________________
block8_6_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_6_mixed[0][0]
__________________________________________________________________________________________________
block8_6 (Lambda) (None, 8, 8, 2080) 0 block8_5_ac[0][0]
block8_6_conv[0][0]
__________________________________________________________________________________________________
block8_6_ac (Activation) (None, 8, 8, 2080) 0 block8_6[0][0]
__________________________________________________________________________________________________
conv2d_193 (Conv2D) (None, 8, 8, 192) 399360 block8_6_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_193 (BatchN (None, 8, 8, 192) 576 conv2d_193[0][0]
__________________________________________________________________________________________________
activation_777 (Activation) (None, 8, 8, 192) 0 batch_normalization_193[0][0]
__________________________________________________________________________________________________
conv2d_194 (Conv2D) (None, 8, 8, 224) 129024 activation_777[0][0]
__________________________________________________________________________________________________
batch_normalization_194 (BatchN (None, 8, 8, 224) 672 conv2d_194[0][0]
__________________________________________________________________________________________________
activation_778 (Activation) (None, 8, 8, 224) 0 batch_normalization_194[0][0]
__________________________________________________________________________________________________
conv2d_192 (Conv2D) (None, 8, 8, 192) 399360 block8_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_195 (Conv2D) (None, 8, 8, 256) 172032 activation_778[0][0]
__________________________________________________________________________________________________
batch_normalization_192 (BatchN (None, 8, 8, 192) 576 conv2d_192[0][0]
__________________________________________________________________________________________________
batch_normalization_195 (BatchN (None, 8, 8, 256) 768 conv2d_195[0][0]
__________________________________________________________________________________________________
activation_776 (Activation) (None, 8, 8, 192) 0 batch_normalization_192[0][0]
__________________________________________________________________________________________________
activation_779 (Activation) (None, 8, 8, 256) 0 batch_normalization_195[0][0]
__________________________________________________________________________________________________
block8_7_mixed (Concatenate) (None, 8, 8, 448) 0 activation_776[0][0]
activation_779[0][0]
__________________________________________________________________________________________________
block8_7_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_7_mixed[0][0]
__________________________________________________________________________________________________
block8_7 (Lambda) (None, 8, 8, 2080) 0 block8_6_ac[0][0]
block8_7_conv[0][0]
__________________________________________________________________________________________________
block8_7_ac (Activation) (None, 8, 8, 2080) 0 block8_7[0][0]
__________________________________________________________________________________________________
conv2d_197 (Conv2D) (None, 8, 8, 192) 399360 block8_7_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_197 (BatchN (None, 8, 8, 192) 576 conv2d_197[0][0]
__________________________________________________________________________________________________
activation_781 (Activation) (None, 8, 8, 192) 0 batch_normalization_197[0][0]
__________________________________________________________________________________________________
conv2d_198 (Conv2D) (None, 8, 8, 224) 129024 activation_781[0][0]
__________________________________________________________________________________________________
batch_normalization_198 (BatchN (None, 8, 8, 224) 672 conv2d_198[0][0]
__________________________________________________________________________________________________
activation_782 (Activation) (None, 8, 8, 224) 0 batch_normalization_198[0][0]
__________________________________________________________________________________________________
conv2d_196 (Conv2D) (None, 8, 8, 192) 399360 block8_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_199 (Conv2D) (None, 8, 8, 256) 172032 activation_782[0][0]
__________________________________________________________________________________________________
batch_normalization_196 (BatchN (None, 8, 8, 192) 576 conv2d_196[0][0]
__________________________________________________________________________________________________
batch_normalization_199 (BatchN (None, 8, 8, 256) 768 conv2d_199[0][0]
__________________________________________________________________________________________________
activation_780 (Activation) (None, 8, 8, 192) 0 batch_normalization_196[0][0]
__________________________________________________________________________________________________
activation_783 (Activation) (None, 8, 8, 256) 0 batch_normalization_199[0][0]
__________________________________________________________________________________________________
block8_8_mixed (Concatenate) (None, 8, 8, 448) 0 activation_780[0][0]
activation_783[0][0]
__________________________________________________________________________________________________
block8_8_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_8_mixed[0][0]
__________________________________________________________________________________________________
block8_8 (Lambda) (None, 8, 8, 2080) 0 block8_7_ac[0][0]
block8_8_conv[0][0]
__________________________________________________________________________________________________
block8_8_ac (Activation) (None, 8, 8, 2080) 0 block8_8[0][0]
__________________________________________________________________________________________________
conv2d_201 (Conv2D) (None, 8, 8, 192) 399360 block8_8_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_201 (BatchN (None, 8, 8, 192) 576 conv2d_201[0][0]
__________________________________________________________________________________________________
activation_785 (Activation) (None, 8, 8, 192) 0 batch_normalization_201[0][0]
__________________________________________________________________________________________________
conv2d_202 (Conv2D) (None, 8, 8, 224) 129024 activation_785[0][0]
__________________________________________________________________________________________________
batch_normalization_202 (BatchN (None, 8, 8, 224) 672 conv2d_202[0][0]
__________________________________________________________________________________________________
activation_786 (Activation) (None, 8, 8, 224) 0 batch_normalization_202[0][0]
__________________________________________________________________________________________________
conv2d_200 (Conv2D) (None, 8, 8, 192) 399360 block8_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_203 (Conv2D) (None, 8, 8, 256) 172032 activation_786[0][0]
__________________________________________________________________________________________________
batch_normalization_200 (BatchN (None, 8, 8, 192) 576 conv2d_200[0][0]
__________________________________________________________________________________________________
batch_normalization_203 (BatchN (None, 8, 8, 256) 768 conv2d_203[0][0]
__________________________________________________________________________________________________
activation_784 (Activation) (None, 8, 8, 192) 0 batch_normalization_200[0][0]
__________________________________________________________________________________________________
activation_787 (Activation) (None, 8, 8, 256) 0 batch_normalization_203[0][0]
__________________________________________________________________________________________________
block8_9_mixed (Concatenate) (None, 8, 8, 448) 0 activation_784[0][0]
activation_787[0][0]
__________________________________________________________________________________________________
block8_9_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_9_mixed[0][0]
__________________________________________________________________________________________________
block8_9 (Lambda) (None, 8, 8, 2080) 0 block8_8_ac[0][0]
block8_9_conv[0][0]
__________________________________________________________________________________________________
block8_9_ac (Activation) (None, 8, 8, 2080) 0 block8_9[0][0]
__________________________________________________________________________________________________
conv2d_205 (Conv2D) (None, 8, 8, 192) 399360 block8_9_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_205 (BatchN (None, 8, 8, 192) 576 conv2d_205[0][0]
__________________________________________________________________________________________________
activation_789 (Activation) (None, 8, 8, 192) 0 batch_normalization_205[0][0]
__________________________________________________________________________________________________
conv2d_206 (Conv2D) (None, 8, 8, 224) 129024 activation_789[0][0]
__________________________________________________________________________________________________
batch_normalization_206 (BatchN (None, 8, 8, 224) 672 conv2d_206[0][0]
__________________________________________________________________________________________________
activation_790 (Activation) (None, 8, 8, 224) 0 batch_normalization_206[0][0]
__________________________________________________________________________________________________
conv2d_204 (Conv2D) (None, 8, 8, 192) 399360 block8_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_207 (Conv2D) (None, 8, 8, 256) 172032 activation_790[0][0]
__________________________________________________________________________________________________
batch_normalization_204 (BatchN (None, 8, 8, 192) 576 conv2d_204[0][0]
__________________________________________________________________________________________________
batch_normalization_207 (BatchN (None, 8, 8, 256) 768 conv2d_207[0][0]
__________________________________________________________________________________________________
activation_788 (Activation) (None, 8, 8, 192) 0 batch_normalization_204[0][0]
__________________________________________________________________________________________________
activation_791 (Activation) (None, 8, 8, 256) 0 batch_normalization_207[0][0]
__________________________________________________________________________________________________
block8_10_mixed (Concatenate) (None, 8, 8, 448) 0 activation_788[0][0]
activation_791[0][0]
__________________________________________________________________________________________________
block8_10_conv (Conv2D) (None, 8, 8, 2080) 933920 block8_10_mixed[0][0]
__________________________________________________________________________________________________
block8_10 (Lambda) (None, 8, 8, 2080) 0 block8_9_ac[0][0]
block8_10_conv[0][0]
__________________________________________________________________________________________________
conv_7b (Conv2D) (None, 8, 8, 1536) 3194880 block8_10[0][0]
__________________________________________________________________________________________________
conv_7b_bn (BatchNormalization) (None, 8, 8, 1536) 4608 conv_7b[0][0]
__________________________________________________________________________________________________
conv_7b_ac (Activation) (None, 8, 8, 1536) 0 conv_7b_bn[0][0]
__________________________________________________________________________________________________
global_average_pooling2d_14 (Gl (None, 1536) 0 conv_7b_ac[0][0]
__________________________________________________________________________________________________
dropout_14 (Dropout) (None, 1536) 0 global_average_pooling2d_14[0][0]
__________________________________________________________________________________________________
dense_14 (Dense) (None, 1) 1537 dropout_14[0][0]
==================================================================================================
Total params: 54,338,273
Trainable params: 12,971,201
Non-trainable params: 41,367,072
__________________________________________________________________________________________________
# Train the InceptionResNetV2 transfer-learning model; the History object is kept in `result`.
result = tl_inception_restnet_v2.model_fit()
Epoch 1/8 13/13 [==============================] - 76s 6s/step - loss: 0.1983 - acc: 0.9266 - val_loss: 0.6242 - val_acc: 0.9100 Epoch 00001: saving model to inception_res_v2_01_0.6242.hdf5 Epoch 2/8 13/13 [==============================] - 52s 4s/step - loss: 0.0564 - acc: 0.9790 - val_loss: 2.2455 - val_acc: 0.6750 Epoch 00002: saving model to inception_res_v2_02_2.2455.hdf5 Epoch 3/8 13/13 [==============================] - 52s 4s/step - loss: 0.0240 - acc: 0.9904 - val_loss: 0.1097 - val_acc: 0.9775 Epoch 00003: saving model to inception_res_v2_03_0.1097.hdf5 Epoch 4/8 13/13 [==============================] - 52s 4s/step - loss: 0.0114 - acc: 0.9952 - val_loss: 0.1691 - val_acc: 0.9725 Epoch 00004: saving model to inception_res_v2_04_0.1691.hdf5 Epoch 5/8 13/13 [==============================] - 52s 4s/step - loss: 0.0033 - acc: 0.9988 - val_loss: 0.0268 - val_acc: 0.9900 Epoch 00005: saving model to inception_res_v2_05_0.0268.hdf5 Epoch 6/8 13/13 [==============================] - 52s 4s/step - loss: 0.0021 - acc: 0.9994 - val_loss: 0.0362 - val_acc: 0.9900 Epoch 00006: saving model to inception_res_v2_06_0.0362.hdf5 Epoch 7/8 13/13 [==============================] - 52s 4s/step - loss: 0.0061 - acc: 0.9988 - val_loss: 0.0367 - val_acc: 0.9925 Epoch 00007: saving model to inception_res_v2_07_0.0367.hdf5 Epoch 8/8 13/13 [==============================] - 52s 4s/step - loss: 0.0062 - acc: 0.9982 - val_loss: 0.1067 - val_acc: 0.9775 Epoch 00008: saving model to inception_res_v2_08_0.1067.hdf5
# Persist the training history to disk, reload it, and plot the loss/accuracy curves.
tl_inception_restnet_v2.save_training_log()
tl_inception_restnet_v2.load_training_log()
tl_inception_restnet_v2.visualize_trainning()
Saving trainning history to file: train_log_inception_res_v2.npz Restoring trainning history from file: train_log_inception_res_v2.npz
Load the best model (the checkpoint with the smallest val_loss)
# Pick the checkpoint with the smallest val_loss and restore its weights.
best_model = tl_inception_restnet_v2.find_best_model()
tl_inception_restnet_v2.load_model(best_model)
The best model is: inception_res_v2_05_0.0268.hdf5
Write the prediction results to a CSV file
# Predict on the test set and write the submission CSV (pred_inception_res_v2.csv).
tl_inception_restnet_v2.model_pred()
tl_inception_restnet_v2.save_test_csv()
8/8 [==============================] - 35s 4s/step Saving test result on: pred_inception_res_v2.csv
Extract features separately from the original images and from the augmentation-transformed images
# Extract high-level (bottleneck) features twice: once from the original images,
# and once with data-augmentation transforms applied (feature_enh=True).
tl_inception_restnet_v2.hl_feature_extractor(TRAIN_GEN_EX, feature_enh=False)
tl_inception_restnet_v2.hl_feature_extractor(TRAIN_GEN_EX, feature_enh=True)
# Load the pre-extracted bottleneck features of each backbone, scale every
# model's contribution by its ensemble weight, and fuse them column-wise.
feature_files = ["feature_resnet50.h5", "feature_xception.h5", "feature_inception_res_v2.h5"]
weight = [0.2, 0.2, 0.6]
train_parts, test_parts = [], []
for w, fname in zip(weight, feature_files):
    with h5py.File(fname, 'r') as h:
        train_parts.append(w * np.array(h['train']))
        test_parts.append(w * np.array(h['test']))
        y_train = np.array(h['label'])  # every file stores labels in the same row order
X_train = train_parts
X_test = test_parts
# Keep pristine copies so the outlier-removal pass below can restart from them.
X_train_load = np.concatenate(X_train, axis=1)
y_train_load = y_train
X_test = np.concatenate(X_test, axis=1)
X_train = X_train_load
y_train = y_train_load
# Build an (unshuffled) generator over the augmented training folder so the
# file order matches the row order of the saved feature arrays.
gen = ImageDataGenerator()
flow_kwargs = dict(
    target_size=(224, 224),
    shuffle=False,
    batch_size=128,
    class_mode=None,
)
train_gen = gen.flow_from_directory('./train_gen_ex', **flow_kwargs)
filenames = train_gen.filenames
Found 25000 images belonging to 2 classes.
Concatenate the features extracted by each model
import h5py
import numpy as np
from sklearn.utils import shuffle

# Load the augmentation-enhanced features with equal weights and fuse them.
enh_files = ["feature_enh_resnet50.h5", "feature_enh_xception.h5", "feature_enh_inception_res_v2.h5"]
weight = [1, 1, 1]
enh_parts = []
for w, fname in zip(weight, enh_files):
    with h5py.File(fname, 'r') as h:
        enh_parts.append(w * np.array(h['train']))
        y_train_enh = np.array(h['label'])
X_train_enh = np.concatenate(enh_parts, axis=1)
# The identical random_state keeps the plain and enhanced feature matrices
# row-aligned with each other and with `filenames`.
X_train, y_train, filenames = shuffle(X_train, y_train, filenames, random_state=42)
X_train_enh, y_train_enh = shuffle(X_train_enh, y_train_enh, random_state=42)
np.random.seed(42)

def get_nn_clf():
    """Build the small ensemble classifier: Dropout + one sigmoid unit.

    Input is the concatenated bottleneck-feature vector (shape taken from
    the module-level `X_train`); output is a single dog-probability,
    trained with binary cross-entropy.

    Returns:
        A compiled keras Model.
    """
    input_tensor = Input(X_train.shape[1:])
    # Heavy dropout regularizes the only trainable layer (Dense has just
    # feature_dim + 1 parameters).
    x = Dropout(0.6)(input_tensor)
    # The explicit `*_regularizer=None` kwargs from the original were the
    # defaults, and the configured-but-unused SGD optimizer was removed.
    x = Dense(1, activation='sigmoid', kernel_initializer='glorot_uniform')(x)
    model = Model(input_tensor, x)
    adam = optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
    model.compile(optimizer=adam,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model
# Hyper-parameters for the ensemble classifier's training runs.
epochs, patience, batch_size = 50, 0, 128
from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau
# Checkpoint every epoch where val_loss improves; the filename embeds the
# epoch number and the val_loss value.
check_pt = ModelCheckpoint(
'ensumble_{epoch:02d}_{val_loss:.4f}.hdf5',
monitor='val_loss',
verbose=1,
save_best_only=True,
save_weights_only=False,
period=1
)
# Stop as soon as val_loss fails to improve by min_delta (patience is 0
# here, so a single stagnant epoch ends training).
early_stop = EarlyStopping(
monitor='val_loss',
min_delta=0.00005,
patience=patience,
verbose=1,
mode='auto'
)
# Divide the learning rate by 10 after 3 epochs without val_loss improvement.
# NOTE(review): these three callbacks are defined but the fit() calls below
# pass callbacks=[] — confirm whether they were meant to be wired in.
reduce_lr = ReduceLROnPlateau(
monitor='val_loss',
factor=0.1,
patience=3,
verbose=1,
epsilon=0.0001,
mode='auto',
)
# Train the ensemble classifier on the fused features.
# shuffle=False: rows were already shuffled above with a fixed seed.
model = get_nn_clf()
result = model.fit(X_train,
                   y_train, batch_size=batch_size,
                   epochs=10, validation_split=0.2, shuffle=False, callbacks=[])
Train on 20000 samples, validate on 5000 samples Epoch 1/10 20000/20000 [==============================] - 2s 81us/step - loss: 0.0238 - acc: 0.9946 - val_loss: 0.0073 - val_acc: 0.9978 Epoch 2/10 20000/20000 [==============================] - 1s 51us/step - loss: 0.0076 - acc: 0.9978 - val_loss: 0.0062 - val_acc: 0.9976 Epoch 3/10 20000/20000 [==============================] - 1s 51us/step - loss: 0.0070 - acc: 0.9977 - val_loss: 0.0058 - val_acc: 0.9978 Epoch 4/10 20000/20000 [==============================] - 1s 60us/step - loss: 0.0065 - acc: 0.9978 - val_loss: 0.0054 - val_acc: 0.9978 Epoch 5/10 20000/20000 [==============================] - 1s 52us/step - loss: 0.0062 - acc: 0.9979 - val_loss: 0.0052 - val_acc: 0.9978 Epoch 6/10 20000/20000 [==============================] - 1s 59us/step - loss: 0.0057 - acc: 0.9981 - val_loss: 0.0049 - val_acc: 0.9980 Epoch 7/10 20000/20000 [==============================] - 1s 60us/step - loss: 0.0055 - acc: 0.9981 - val_loss: 0.0047 - val_acc: 0.9980 Epoch 8/10 20000/20000 [==============================] - 1s 57us/step - loss: 0.0052 - acc: 0.9983 - val_loss: 0.0045 - val_acc: 0.9982 Epoch 9/10 20000/20000 [==============================] - 1s 63us/step - loss: 0.0050 - acc: 0.9981 - val_loss: 0.0044 - val_acc: 0.9980 Epoch 10/10 20000/20000 [==============================] - 1s 56us/step - loss: 0.0047 - acc: 0.9983 - val_loss: 0.0043 - val_acc: 0.9980
# Plot the training curves; result.epoch is 0-based, shift to 1-based for display.
epoch = [e + 1 for e in result.epoch]
# Fixed typos in the chart title ("Trainning" -> "Training", "Muli" -> "Multi").
visualize_epochs(epoch, result.history, 'Training History of Multi-Model Feature Extraction')
# Generator over the test folder; unshuffled so that prediction order
# lines up with test_gen.filenames.
gen = ImageDataGenerator()
test_gen = gen.flow_from_directory(
    './test_gen',
    class_mode=None,
    batch_size=128,
    shuffle=False,
    target_size=(224, 224),
)
Found 12500 images belonging to 1 classes.
# Predict dog-probabilities for the fused test features and write pred_merge.csv.
y_pred = model.predict(X_test, verbose=1)
df = save_test_to_csv(y_pred, 'merge', test_gen)
12500/12500 [==============================] - 0s 33us/step Saving test result on: pred_merge.csv
Run the trained model over all the training data, compare the predictions against the actual labels, and pull out the images whose predictions disagree with their labels
# Re-run the trained classifier over the full training set to hunt for mislabels.
train_pred = model.predict(X_train, verbose=1)
25000/25000 [==============================] - 1s 29us/step
# Binarize the sigmoid outputs at 0.5 and flatten (N, 1) -> (N,).
pred = (train_pred >= 0.5).astype(int)
pred = pred[:, 0]
# Indices where the model disagrees with the dataset label.
error = [i for i, (p, y) in enumerate(zip(pred, y_train)) if p != y]
len(error)
# Keep only confident disagreements — labeled cat (0) but predicted >= 0.65,
# or labeled dog (1) but predicted <= 0.35 — as likely mislabeled images.
outlier_2 = [(filenames[e].split('/')[1], y_train[e], train_pred[e]) for e in error
             if (y_train[e] == 0 and train_pred[e] >= 0.65) or (y_train[e] == 1 and train_pred[e] <= 0.35)]
for f, _, _ in outlier_2:  # the original's [*outlier_2] copy was a no-op; iterate directly
    print('\''+f+'\',')
print('Total outlier is {}'.format(len(outlier_2)))
'cat.4688.jpg', 'cat.12272.jpg', 'dog.10179.jpg', 'dog.12142.jpg', 'dog.2542.jpg', 'cat.11399.jpg', 'cat.6699.jpg', 'cat.7920.jpg', 'cat.5241.jpg', 'cat.10266.jpg', 'dog.5767.jpg', 'cat.3658.jpg', 'dog.4334.jpg', 'dog.5529.jpg', 'dog.6921.jpg', 'cat.8456.jpg', 'dog.9150.jpg', 'dog.8507.jpg', 'cat.4085.jpg', 'cat.9250.jpg', 'dog.11731.jpg', 'dog.6405.jpg', 'cat.6906.jpg', 'cat.724.jpg', Total outlier is 24
# Display up to 36 suspected mislabels with the model's predicted dog-probability.
plt.figure(figsize=(13, 11))
for i, (f,_,p) in enumerate([*outlier_2][:36]):
    img = cv2.resize(plt.imread(os.path.join(train_folder, f)), (300, 300))
    plt.subplot(4, 6, i+1)
    plt.title(f + '\n' + 'Predict: {:.2f}%'.format(p[0]*100))
    plt.axis('off')
    plt.imshow(img)
plt.show()
# Manually vetted mislabeled training images (from the confident-disagreement
# scan above, then visually confirmed); dropped before the final retraining.
outlier = [
    'cat.4688.jpg',
    'cat.12272.jpg',
    'dog.10179.jpg',
    'dog.12142.jpg',
    'dog.2542.jpg',
    'cat.11399.jpg',
    'cat.6699.jpg',
    'cat.7920.jpg',
    'cat.5241.jpg',
    'cat.10266.jpg',
    'dog.5767.jpg',
    'cat.3658.jpg',
    'dog.4334.jpg',
    'dog.5529.jpg',
    'dog.6921.jpg',
    'cat.8456.jpg',
    'dog.9150.jpg',
    'dog.8507.jpg',
    'cat.4085.jpg',
    'cat.9250.jpg',
    'dog.11731.jpg',
    'dog.6405.jpg',
    'cat.6906.jpg',
    'cat.724.jpg',
]
# Drop the manually confirmed mislabels from the training arrays, starting
# from the pristine *_load copies saved during feature fusion.
outlier_set = set(outlier)  # O(1) membership instead of scanning the list per filename
# os.path.basename is path-separator-safe, unlike split('/')[1].
outlier_n = [i for i, fn in enumerate(train_gen.filenames)
             if os.path.basename(fn) in outlier_set]
drop = set(outlier_n)  # set membership again for the three 25k-row filters below
X_train = np.array([x for i, x in enumerate(X_train_load) if i not in drop])
X_train.shape
y_train = np.array([x for i, x in enumerate(y_train_load) if i not in drop])
y_train.shape
filenames = [x for i, x in enumerate(train_gen.filenames) if i not in drop]
len(filenames)
# Same seed as the earlier shuffle so features/labels/filenames stay row-aligned.
X_train, y_train, filenames = shuffle(X_train, y_train, filenames, random_state=42)
# Re-seed and retrain a fresh classifier from scratch on the cleaned training set.
np.random.seed(42)
model = get_nn_clf()
result = model.fit(X_train,
                   y_train, batch_size=batch_size,
                   epochs=18, validation_split=0.2, shuffle=False, callbacks=[])
Train on 19980 samples, validate on 4996 samples Epoch 1/18 19980/19980 [==============================] - 2s 94us/step - loss: 0.0210 - acc: 0.9959 - val_loss: 0.0069 - val_acc: 0.9982 Epoch 2/18 19980/19980 [==============================] - 1s 50us/step - loss: 0.0046 - acc: 0.9987 - val_loss: 0.0057 - val_acc: 0.9982 Epoch 3/18 19980/19980 [==============================] - 1s 50us/step - loss: 0.0038 - acc: 0.9985 - val_loss: 0.0052 - val_acc: 0.9982 Epoch 4/18 19980/19980 [==============================] - 1s 50us/step - loss: 0.0034 - acc: 0.9987 - val_loss: 0.0049 - val_acc: 0.9982 Epoch 5/18 19980/19980 [==============================] - 1s 51us/step - loss: 0.0030 - acc: 0.9987 - val_loss: 0.0047 - val_acc: 0.9982 Epoch 6/18 19980/19980 [==============================] - 1s 61us/step - loss: 0.0028 - acc: 0.9988 - val_loss: 0.0046 - val_acc: 0.9982 Epoch 7/18 19980/19980 [==============================] - 1s 61us/step - loss: 0.0027 - acc: 0.9988 - val_loss: 0.0044 - val_acc: 0.9984 Epoch 8/18 19980/19980 [==============================] - 1s 53us/step - loss: 0.0024 - acc: 0.9990 - val_loss: 0.0043 - val_acc: 0.9986 Epoch 9/18 19980/19980 [==============================] - 1s 63us/step - loss: 0.0024 - acc: 0.9990 - val_loss: 0.0042 - val_acc: 0.9986 Epoch 10/18 19980/19980 [==============================] - 1s 57us/step - loss: 0.0023 - acc: 0.9991 - val_loss: 0.0041 - val_acc: 0.9988 Epoch 11/18 19980/19980 [==============================] - 1s 51us/step - loss: 0.0022 - acc: 0.9989 - val_loss: 0.0041 - val_acc: 0.9988 Epoch 12/18 19980/19980 [==============================] - 1s 51us/step - loss: 0.0021 - acc: 0.9991 - val_loss: 0.0041 - val_acc: 0.9988 Epoch 13/18 19980/19980 [==============================] - 1s 62us/step - loss: 0.0020 - acc: 0.9992 - val_loss: 0.0040 - val_acc: 0.9988 Epoch 14/18 19980/19980 [==============================] - 1s 56us/step - loss: 0.0020 - acc: 0.9991 - val_loss: 0.0039 - val_acc: 0.9990 Epoch 15/18 19980/19980 
[==============================] - 1s 64us/step - loss: 0.0019 - acc: 0.9993 - val_loss: 0.0039 - val_acc: 0.9990 Epoch 16/18 19980/19980 [==============================] - 1s 58us/step - loss: 0.0018 - acc: 0.9991 - val_loss: 0.0039 - val_acc: 0.9990 Epoch 17/18 19980/19980 [==============================] - 1s 56us/step - loss: 0.0017 - acc: 0.9993 - val_loss: 0.0039 - val_acc: 0.9990 Epoch 18/18 19980/19980 [==============================] - 1s 62us/step - loss: 0.0016 - acc: 0.9994 - val_loss: 0.0039 - val_acc: 0.9990
# Two extra epochs on the augmentation-enhanced features.
# NOTE(review): presumably a regularizing fine-tune pass — confirm intent.
model.fit(X_train_enh, y_train_enh, batch_size=batch_size, epochs=2, validation_split=0.2, shuffle=True, callbacks=[])
Train on 20000 samples, validate on 5000 samples Epoch 1/2 20000/20000 [==============================] - 1s 52us/step - loss: 0.0131 - acc: 0.9974 - val_loss: 0.0068 - val_acc: 0.9980 Epoch 2/2 20000/20000 [==============================] - 1s 51us/step - loss: 0.0077 - acc: 0.9984 - val_loss: 0.0067 - val_acc: 0.9976
<keras.callbacks.History at 0x145247668>
# Final test-set prediction with the retrained model; overwrites pred_merge.csv.
y_pred = model.predict(X_test, verbose=1)
df = save_test_to_csv(y_pred, 'merge', test_gen)
12500/12500 [==============================] - 0s 37us/step Saving test result on: pred_merge.csv
# Inspect test images the model is unsure about: predictions strictly
# inside (0.2, 0.8), sampled in a fixed shuffled order.
P = shuffle(df['label'], random_state=2018)
P = P[(P > 0.2) & (P < 0.8)]
plt.figure(figsize=(10, 8))
# (The original seeded a dead `i = 0` that the loop variable immediately
# shadowed; enumerate supplies the subplot index.)
for i, (index, p) in enumerate(P.head(15).items()):
    # Test files are named 1.jpg..12500.jpg; df rows are 0-based, hence index+1.
    img = cv2.resize(plt.imread(os.path.join(test_folder, str(index + 1) + '.jpg')), (300, 300))
    plt.subplot(3, 5, i + 1)
    label = 'dog' if p >= 0.5 else 'cat'
    p = 1 - p if p < 0.5 else p  # show the confidence of the predicted class
    plt.title(label + ' {:.2f}%'.format(p*100) + '\n' + str(index+1)+'.jpg')
    plt.axis('off')
    plt.imshow(img)
plt.show()